Merge asterix_stabilization into asterix_stabilization_result_distribution.

git-svn-id: https://asterixdb.googlecode.com/svn/branches/asterix_stabilization_result_distribution@1223 eaa15691-b419-025a-1212-ee371bd00084
diff --git a/asterix-algebra/pom.xml b/asterix-algebra/pom.xml
index a6f0ac4..b81e77f 100644
--- a/asterix-algebra/pom.xml
+++ b/asterix-algebra/pom.xml
@@ -1,4 +1,5 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 	<modelVersion>4.0.0</modelVersion>
 	<parent>
 		<artifactId>asterix</artifactId>
@@ -6,6 +7,7 @@
 		<version>0.0.4-SNAPSHOT</version>
 	</parent>
 	<artifactId>asterix-algebra</artifactId>
+
 	<build>
 		<plugins>
 			<plugin>
@@ -13,8 +15,8 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
 				</configuration>
 			</plugin>
 			<plugin>
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java
index 704ccb5..5c54a4a 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -15,6 +15,7 @@
 
 package edu.uci.ics.asterix.optimizer.rules;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -93,40 +94,44 @@
             }
 
             IAType t = (IAType) env.getType(fce.getArguments().get(0).getValue());
-            switch (t.getTypeTag()) {
-                case ANY: {
-                    return false;
-                }
-                case RECORD: {
-                    ARecordType recType = (ARecordType) t;
-                    ILogicalExpression fai = createFieldAccessByIndex(recType, fce);
-                    if (fai == null) {
+            try {
+                switch (t.getTypeTag()) {
+                    case ANY: {
                         return false;
                     }
-                    expressions.get(i).setValue(fai);
-                    changed = true;
-                    break;
-                }
-                case UNION: {
-                    AUnionType unionT = (AUnionType) t;
-                    if (unionT.isNullableType()) {
-                        IAType t2 = unionT.getUnionList().get(1);
-                        if (t2.getTypeTag() == ATypeTag.RECORD) {
-                            ARecordType recType = (ARecordType) t2;
-                            ILogicalExpression fai = createFieldAccessByIndex(recType, fce);
-                            if (fai == null) {
-                                return false;
-                            }
-                            expressions.get(i).setValue(fai);
-                            changed = true;
-                            break;
+                    case RECORD: {
+                        ARecordType recType = (ARecordType) t;
+                        ILogicalExpression fai = createFieldAccessByIndex(recType, fce);
+                        if (fai == null) {
+                            return false;
                         }
+                        expressions.get(i).setValue(fai);
+                        changed = true;
+                        break;
                     }
-                    throw new NotImplementedException("Union " + unionT);
+                    case UNION: {
+                        AUnionType unionT = (AUnionType) t;
+                        if (unionT.isNullableType()) {
+                            IAType t2 = unionT.getUnionList().get(1);
+                            if (t2.getTypeTag() == ATypeTag.RECORD) {
+                                ARecordType recType = (ARecordType) t2;
+                                ILogicalExpression fai = createFieldAccessByIndex(recType, fce);
+                                if (fai == null) {
+                                    return false;
+                                }
+                                expressions.get(i).setValue(fai);
+                                changed = true;
+                                break;
+                            }
+                        }
+                        throw new NotImplementedException("Union " + unionT);
+                    }
+                    default: {
+                        throw new AlgebricksException("Cannot call field-access on data of type " + t);
+                    }
                 }
-                default: {
-                    throw new AlgebricksException("Cannot call field-access on data of type " + t);
-                }
+            } catch (IOException e) {
+                throw new AlgebricksException(e);
             }
         }
         assign.removeAnnotation(AsterixOperatorAnnotations.PUSHED_FIELD_ACCESS);
@@ -134,7 +139,8 @@
     }
 
     @SuppressWarnings("unchecked")
-    private static ILogicalExpression createFieldAccessByIndex(ARecordType recType, AbstractFunctionCallExpression fce) {
+    private static ILogicalExpression createFieldAccessByIndex(ARecordType recType, AbstractFunctionCallExpression fce)
+            throws IOException {
         String s = getStringSecondArgument(fce);
         if (s == null) {
             return null;
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java
index bc61c49..b0ce342 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -16,6 +16,7 @@
 package edu.uci.ics.asterix.optimizer.rules;
 
 import java.io.DataInputStream;
+import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.List;
 
@@ -188,7 +189,12 @@
                 ARecordType rt = (ARecordType) _emptyTypeEnv.getType(expr.getArguments().get(0).getValue());
                 String str = ((AString) ((AsterixConstantValue) ((ConstantExpression) expr.getArguments().get(1)
                         .getValue()).getValue()).getObject()).getStringValue();
-                int k = rt.findFieldPosition(str);
+                int k;
+                try {
+                    k = rt.findFieldPosition(str);
+                } catch (IOException e) {
+                    throw new AlgebricksException(e);
+                }
                 if (k >= 0) {
                     // wait for the ByNameToByIndex rule to apply
                     return new Pair<Boolean, ILogicalExpression>(changed, expr);
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
index ad70d6f..3dad464 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
@@ -186,6 +186,7 @@
                 case DATE:
                 case TIME:
                 case DURATION:
+                case INTERVAL:
                 case POINT:
                 case POINT3D:
                 case POLYGON:
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SimilarityCheckRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SimilarityCheckRule.java
index c90b3f1..e88e5b0 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SimilarityCheckRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SimilarityCheckRule.java
@@ -33,24 +33,22 @@
 
 /**
  * Looks for a select operator, containing a condition:
- * 
  * similarity-function GE/GT/LE/LT constant/variable
- * 
  * Rewrites the select condition (and possibly the assign expr) with the equivalent similarity-check function.
- * 
  */
 public class SimilarityCheckRule implements IAlgebraicRewriteRule {
 
     @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-    	AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
         // Look for select.
         if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
             return false;
         }
         SelectOperator select = (SelectOperator) op;
         Mutable<ILogicalExpression> condExpr = select.getCondition();
-        
+
         // Gather assigns below this select.
         List<AssignOperator> assigns = new ArrayList<AssignOperator>();
         AbstractLogicalOperator childOp = (AbstractLogicalOperator) select.getInputs().get(0).getValue();
@@ -60,12 +58,13 @@
         }
         while (childOp.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
             assigns.add((AssignOperator) childOp);
-        	childOp = (AbstractLogicalOperator) childOp.getInputs().get(0).getValue();
+            childOp = (AbstractLogicalOperator) childOp.getInputs().get(0).getValue();
         }
         return replaceSelectConditionExprs(condExpr, assigns, context);
     }
 
-    private boolean replaceSelectConditionExprs(Mutable<ILogicalExpression> expRef, List<AssignOperator> assigns, IOptimizationContext context) throws AlgebricksException {
+    private boolean replaceSelectConditionExprs(Mutable<ILogicalExpression> expRef, List<AssignOperator> assigns,
+            IOptimizationContext context) throws AlgebricksException {
         ILogicalExpression expr = expRef.getValue();
         if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
             return false;
@@ -83,10 +82,10 @@
             }
             return found;
         }
-        
+
         // Look for GE/GT/LE/LT.
-        if (funcIdent != AlgebricksBuiltinFunctions.GE && funcIdent != AlgebricksBuiltinFunctions.GT &&
-                funcIdent != AlgebricksBuiltinFunctions.LE && funcIdent != AlgebricksBuiltinFunctions.LT) {
+        if (funcIdent != AlgebricksBuiltinFunctions.GE && funcIdent != AlgebricksBuiltinFunctions.GT
+                && funcIdent != AlgebricksBuiltinFunctions.LE && funcIdent != AlgebricksBuiltinFunctions.LT) {
             return false;
         }
 
@@ -98,8 +97,8 @@
         // Normalized GE/GT/LE/LT as if constant was on the right hand side.
         FunctionIdentifier normFuncIdent = null;
         // One of the args must be a constant.
-        if (arg1.getExpressionTag() == LogicalExpressionTag.CONSTANT) {                    	
-        	ConstantExpression constExpr = (ConstantExpression) arg1;
+        if (arg1.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
+            ConstantExpression constExpr = (ConstantExpression) arg1;
             constVal = (AsterixConstantValue) constExpr.getValue();
             nonConstExpr = arg2;
             // Get func ident as if swapping lhs and rhs.            
@@ -113,91 +112,101 @@
         } else {
             return false;
         }
-        
+
         // The other arg is a function call. We can directly replace the select condition with an equivalent similarity check expression.
         if (nonConstExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-        	return replaceWithFunctionCallArg(expRef, normFuncIdent, constVal, (AbstractFunctionCallExpression) nonConstExpr);
+            return replaceWithFunctionCallArg(expRef, normFuncIdent, constVal,
+                    (AbstractFunctionCallExpression) nonConstExpr);
         }
         // The other arg is a variable. We may have to introduce an assign operator that assigns the result of a similarity-check function to a variable.
         if (nonConstExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-        	return replaceWithVariableArg(expRef, normFuncIdent, constVal, (VariableReferenceExpression) nonConstExpr, assigns, context);
+            return replaceWithVariableArg(expRef, normFuncIdent, constVal, (VariableReferenceExpression) nonConstExpr,
+                    assigns, context);
         }
         return false;
     }
-    
+
     private boolean replaceWithVariableArg(Mutable<ILogicalExpression> expRef, FunctionIdentifier normFuncIdent,
-    		AsterixConstantValue constVal, VariableReferenceExpression varRefExpr, List<AssignOperator> assigns, IOptimizationContext context) throws AlgebricksException {
-    	
-    	// Find variable in assigns to determine its originating function.    	
-    	LogicalVariable var = varRefExpr.getVariableReference();
-    	Mutable<ILogicalExpression> simFuncExprRef = null;
-    	ScalarFunctionCallExpression simCheckFuncExpr = null;
-    	AssignOperator matchingAssign = null;
-     	for (int i = 0; i < assigns.size(); i++) {
-    		AssignOperator assign = assigns.get(i);
-    		for (int j = 0; j < assign.getVariables().size(); j++) {
-    			// Check if variables match.
-    			if (var != assign.getVariables().get(j)) {
-    				continue;
-    			}
-    			// Check if corresponding expr is a function call.
-    			if (assign.getExpressions().get(j).getValue().getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-    				continue;
-    			}
-    			simFuncExprRef = assign.getExpressions().get(j);
-    			// Analyze function expression and get equivalent similarity check function.
-    			simCheckFuncExpr = getSimilarityCheckExpr(normFuncIdent, constVal, (AbstractFunctionCallExpression) simFuncExprRef.getValue());
-    			matchingAssign = assign;
-    			break;
-    		}
-    		if (simCheckFuncExpr != null) {
-    			break;
-    		}
-    	}
-    	
-    	// Only non-null if we found that varRefExpr refers to an optimizable similarity function call. 
-    	if (simCheckFuncExpr != null) {
-    		// Create a new assign under matchingAssign which assigns the result of our similarity-check function to a variable.
-    		LogicalVariable newVar = context.newVar();
-    		AssignOperator newAssign = new AssignOperator(newVar, new MutableObject<ILogicalExpression>(simCheckFuncExpr));
-    		// Hook up inputs. 
-    		newAssign.getInputs().add(new MutableObject<ILogicalOperator>(matchingAssign.getInputs().get(0).getValue()));
-    		matchingAssign.getInputs().get(0).setValue(newAssign);    		
-    		
-    		// Replace select condition with a get-item on newVar.
+            AsterixConstantValue constVal, VariableReferenceExpression varRefExpr, List<AssignOperator> assigns,
+            IOptimizationContext context) throws AlgebricksException {
+
+        // Find variable in assigns to determine its originating function.    	
+        LogicalVariable var = varRefExpr.getVariableReference();
+        Mutable<ILogicalExpression> simFuncExprRef = null;
+        ScalarFunctionCallExpression simCheckFuncExpr = null;
+        AssignOperator matchingAssign = null;
+        for (int i = 0; i < assigns.size(); i++) {
+            AssignOperator assign = assigns.get(i);
+            for (int j = 0; j < assign.getVariables().size(); j++) {
+                // Check if variables match.
+                if (var != assign.getVariables().get(j)) {
+                    continue;
+                }
+                // Check if corresponding expr is a function call.
+                if (assign.getExpressions().get(j).getValue().getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+                    continue;
+                }
+                simFuncExprRef = assign.getExpressions().get(j);
+                // Analyze function expression and get equivalent similarity check function.
+                simCheckFuncExpr = getSimilarityCheckExpr(normFuncIdent, constVal,
+                        (AbstractFunctionCallExpression) simFuncExprRef.getValue());
+                matchingAssign = assign;
+                break;
+            }
+            if (simCheckFuncExpr != null) {
+                break;
+            }
+        }
+
+        // Only non-null if we found that varRefExpr refers to an optimizable similarity function call. 
+        if (simCheckFuncExpr != null) {
+            // Create a new assign under matchingAssign which assigns the result of our similarity-check function to a variable.
+            LogicalVariable newVar = context.newVar();
+            AssignOperator newAssign = new AssignOperator(newVar, new MutableObject<ILogicalExpression>(
+                    simCheckFuncExpr));
+            // Hook up inputs. 
+            newAssign.getInputs()
+                    .add(new MutableObject<ILogicalOperator>(matchingAssign.getInputs().get(0).getValue()));
+            matchingAssign.getInputs().get(0).setValue(newAssign);
+
+            // Replace select condition with a get-item on newVar.
             List<Mutable<ILogicalExpression>> selectGetItemArgs = new ArrayList<Mutable<ILogicalExpression>>();
             // First arg is a variable reference expr on newVar.
             selectGetItemArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(newVar)));
             // Second arg is the item index to be accessed, here 0.
-            selectGetItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(0)))));
-            ILogicalExpression selectGetItemExpr = new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), selectGetItemArgs);
+            selectGetItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(
+                    new AsterixConstantValue(new AInt32(0)))));
+            ILogicalExpression selectGetItemExpr = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), selectGetItemArgs);
             // Replace the old similarity function call with the new getItemExpr.
             expRef.setValue(selectGetItemExpr);
-    		
+
             // Replace expr corresponding to original variable in the original assign with a get-item on newVar.
             List<Mutable<ILogicalExpression>> assignGetItemArgs = new ArrayList<Mutable<ILogicalExpression>>();
             // First arg is a variable reference expr on newVar.
             assignGetItemArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(newVar)));
             // Second arg is the item index to be accessed, here 1.
-            assignGetItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(1)))));
-            ILogicalExpression assignGetItemExpr = new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), assignGetItemArgs);
+            assignGetItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(
+                    new AsterixConstantValue(new AInt32(1)))));
+            ILogicalExpression assignGetItemExpr = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), assignGetItemArgs);
             // Replace the original assign expr with the get-item expr.
             simFuncExprRef.setValue(assignGetItemExpr);
-    		
+
             context.computeAndSetTypeEnvironmentForOperator(newAssign);
             context.computeAndSetTypeEnvironmentForOperator(matchingAssign);
-            
-    		return true;
-    	}
-    	
-    	return false;
+
+            return true;
+        }
+
+        return false;
     }
-    
+
     private boolean replaceWithFunctionCallArg(Mutable<ILogicalExpression> expRef, FunctionIdentifier normFuncIdent,
-    		AsterixConstantValue constVal, AbstractFunctionCallExpression funcExpr) {
-    	// Analyze func expr to see if it is an optimizable similarity function.
-        ScalarFunctionCallExpression simCheckFuncExpr = getSimilarityCheckExpr(normFuncIdent, constVal, funcExpr); 
-        
+            AsterixConstantValue constVal, AbstractFunctionCallExpression funcExpr) {
+        // Analyze func expr to see if it is an optimizable similarity function.
+        ScalarFunctionCallExpression simCheckFuncExpr = getSimilarityCheckExpr(normFuncIdent, constVal, funcExpr);
+
         // Replace the expr in the select condition.
         if (simCheckFuncExpr != null) {
             // Get item 0 from var.
@@ -205,8 +214,10 @@
             // First arg is the similarity-check function call.
             getItemArgs.add(new MutableObject<ILogicalExpression>(simCheckFuncExpr));
             // Second arg is the item index to be accessed.
-            getItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(0)))));
-            ILogicalExpression getItemExpr = new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), getItemArgs);
+            getItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
+                    new AInt32(0)))));
+            ILogicalExpression getItemExpr = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), getItemArgs);
             // Replace the old similarity function call with the new getItemExpr.
             expRef.setValue(getItemExpr);
             return true;
@@ -214,10 +225,10 @@
 
         return false;
     }
-    
+
     private ScalarFunctionCallExpression getSimilarityCheckExpr(FunctionIdentifier normFuncIdent,
-    		AsterixConstantValue constVal, AbstractFunctionCallExpression funcExpr) {
-    	// Remember args from original similarity function to add them to the similarity-check function later.
+            AsterixConstantValue constVal, AbstractFunctionCallExpression funcExpr) {
+        // Remember args from original similarity function to add them to the similarity-check function later.
         ArrayList<Mutable<ILogicalExpression>> similarityArgs = null;
         ScalarFunctionCallExpression simCheckFuncExpr = null; 
         // Look for jaccard function call, and GE or GT.
@@ -270,11 +281,11 @@
         }
         // Preserve all annotations.
         if (simCheckFuncExpr != null) {
-        	simCheckFuncExpr.getAnnotations().putAll(funcExpr.getAnnotations());
+            simCheckFuncExpr.getAnnotations().putAll(funcExpr.getAnnotations());
         }
         return simCheckFuncExpr;
     }
-    
+
     private FunctionIdentifier getLhsAndRhsSwappedFuncIdent(FunctionIdentifier oldFuncIdent) {
         if (oldFuncIdent == AlgebricksBuiltinFunctions.GE) {
             return AlgebricksBuiltinFunctions.LE;
@@ -290,7 +301,7 @@
         }
         throw new IllegalStateException();
     }
-    
+
     @Override
     public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
         return false;
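
For concreteness, the rewrite performed by SimilarityCheckRule can be sketched as follows (an illustrative example, not taken from this patch; the check-function name and threshold follow the jaccard and GET_ITEM built-ins referenced in the code above, and index 0/1 follow the comments on the get-item arguments):

    // Before: the select condition compares a similarity score against a constant.
    //     similarity-jaccard($x, $y) >= 0.8f
    // After: the condition becomes a get-item on the equivalent similarity-check call,
    // whose result carries the boolean at index 0.
    //     get-item(similarity-jaccard-check($x, $y, 0.8f), 0)
    // In the variable case (replaceWithVariableArg), the check call is placed in a new
    // assign, and get-item(..., 1) supplies the similarity value to the original assign.
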
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java
index 6651ea3..ffd447c 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java
@@ -1,5 +1,21 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.asterix.optimizer.rules.am;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -44,7 +60,8 @@
  * Static helper functions for rewriting plans using indexes.
  */
 public class AccessMethodUtils {
-    public static void appendPrimaryIndexTypes(Dataset dataset, IAType itemType, List<Object> target) {
+    public static void appendPrimaryIndexTypes(Dataset dataset, IAType itemType, List<Object> target)
+            throws IOException {
         ARecordType recordType = (ARecordType) itemType;
         List<String> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
         for (String partitioningKey : partitioningKeys) {
@@ -109,7 +126,7 @@
         analysisCtx.matchedFuncExprs.add(new OptimizableFuncExpr(funcExpr, fieldVar, constFilterVal));
         return true;
     }
-    
+
     public static boolean analyzeFuncExprArgsForTwoVars(AbstractFunctionCallExpression funcExpr,
             AccessMethodAnalysisContext analysisCtx) {
         LogicalVariable fieldVar1 = null;
@@ -180,7 +197,11 @@
         // Primary keys.
         List<String> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
         for (String partitioningKey : partitioningKeys) {
-            dest.add(recordType.getFieldType(partitioningKey));
+            try {
+                dest.add(recordType.getFieldType(partitioningKey));
+            } catch (IOException e) {
+                throw new AlgebricksException(e);
+            }
         }
     }
 
@@ -229,14 +250,14 @@
     }
 
     /**
-     *  Returns the first expr optimizable by this index.
+     * Returns the first expr optimizable by this index.
      */
     public static IOptimizableFuncExpr chooseFirstOptFuncExpr(Index chosenIndex, AccessMethodAnalysisContext analysisCtx) {
         List<Integer> indexExprs = analysisCtx.getIndexExprs(chosenIndex);
         int firstExprIndex = indexExprs.get(0);
         return analysisCtx.matchedFuncExprs.get(firstExprIndex);
     }
-    
+
     public static UnnestMapOperator createSecondaryIndexUnnestMap(Dataset dataset, ARecordType recordType, Index index,
             ILogicalOperator inputOp, AccessMethodJobGenParams jobGenParams, IOptimizationContext context,
             boolean outputPrimaryKeysOnly, boolean retainInput) throws AlgebricksException {
@@ -299,7 +320,11 @@
         List<Object> primaryIndexOutputTypes = new ArrayList<Object>();
         // Append output variables/types generated by the primary-index search (not forwarded from input).
         primaryIndexUnnestVars.addAll(dataSourceScan.getVariables());
-        appendPrimaryIndexTypes(dataset, recordType, primaryIndexOutputTypes);
+        try {
+            appendPrimaryIndexTypes(dataset, recordType, primaryIndexOutputTypes);
+        } catch (IOException e) {
+            throw new AlgebricksException(e);
+        }
         // An index search is expressed as an unnest over an index-search function.
         IFunctionInfo primaryIndexSearch = FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.INDEX_SEARCH);
         AbstractFunctionCallExpression primaryIndexSearchFunc = new ScalarFunctionCallExpression(primaryIndexSearch,
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java
index 5c65299..3b547e7 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java
@@ -1,5 +1,21 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.asterix.optimizer.rules.am;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.BitSet;
 import java.util.HashSet;
@@ -205,7 +221,7 @@
                 if (optFuncExpr.getNumLogicalVars() > 1) {
                     // If we are optimizing a join, the matching field may be the second field name.
                     keyPos = indexOf(optFuncExpr.getFieldName(1), chosenIndex.getKeyFieldNames());
-                }                
+                }
             }
             if (keyPos < 0) {
                 throw new AlgebricksException(
@@ -238,7 +254,7 @@
                     // If high and low keys are set, we exit for now.
                     if (setLowKeys.cardinality() == numSecondaryKeys && setHighKeys.cardinality() == numSecondaryKeys) {
                         doneWithExprs = true;
-                    }                    
+                    }
                     break;
                 }
                 case HIGH_EXCLUSIVE: {
@@ -390,7 +406,11 @@
                     secondaryIndexUnnestOp, context, true, retainInput, false);
         } else {
             List<Object> primaryIndexOutputTypes = new ArrayList<Object>();
-            AccessMethodUtils.appendPrimaryIndexTypes(dataset, recordType, primaryIndexOutputTypes);
+            try {
+                AccessMethodUtils.appendPrimaryIndexTypes(dataset, recordType, primaryIndexOutputTypes);
+            } catch (IOException e) {
+                throw new AlgebricksException(e);
+            }
             primaryIndexUnnestOp = new UnnestMapOperator(dataSourceScan.getVariables(),
                     secondaryIndexUnnestOp.getExpressionRef(), primaryIndexOutputTypes, retainInput);
             primaryIndexUnnestOp.getInputs().add(new MutableObject<ILogicalOperator>(inputOp));
@@ -521,7 +541,7 @@
             return (optFuncExpr.getOperatorSubTree(0) == null || optFuncExpr.getOperatorSubTree(0) == probeSubTree);
         }
     }
-    
+
     private ILogicalExpression createSelectCondition(List<Mutable<ILogicalExpression>> predList) {
         if (predList.size() > 1) {
             IFunctionInfo finfo = AsterixBuiltinFunctions.getAsterixFunctionInfo(AlgebricksBuiltinFunctions.AND);
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java
index 9163e35..16cfefe 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java
@@ -15,8 +15,10 @@
 package edu.uci.ics.asterix.translator;
 
 import java.util.Map;
+import java.util.Map.Entry;
 
 import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.expression.DatasetDecl;
 import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
 import edu.uci.ics.asterix.aql.expression.DeleteStatement;
 import edu.uci.ics.asterix.aql.expression.DropStatement;
@@ -27,10 +29,11 @@
 import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinTypeMap;
 import edu.uci.ics.asterix.metadata.entities.Dataverse;
 import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
 
 /**
- * Base class for AQL translators. 
- * Contains the common validation logic for AQL statements.
+ * Base class for AQL translators. Contains the common validation logic for AQL
+ * statements.
  */
 public abstract class AbstractAqlTranslator {
 
@@ -93,6 +96,26 @@
                             + MetadataConstants.METADATA_DATAVERSE_NAME;
                 }
                 break;
+            case DATASET_DECL:
+                DatasetDecl datasetStmt = (DatasetDecl) stmt;
+                Map<String, String> hints = datasetStmt.getHints();
+                if (hints != null && !hints.isEmpty()) {
+                    Pair<Boolean, String> validationResult = null;
+                    StringBuffer errorMsgBuffer = new StringBuffer();
+                    for (Entry<String, String> hint : hints.entrySet()) {
+                        validationResult = DatasetHints.validate(hint.getKey(), hint.getValue());
+                        if (!validationResult.first) {
+                            errorMsgBuffer.append("Dataset: " + datasetStmt.getName().getValue()
+                                    + " error in processing hint: " + hint.getKey() + " " + validationResult.second);
+                            errorMsgBuffer.append(" \n");
+                        }
+                    }
+                    invalidOperation = errorMsgBuffer.length() > 0;
+                    if (invalidOperation) {
+                        message = errorMsgBuffer.toString();
+                    }
+                }
+                break;
 
         }
 
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/DatasetHints.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/DatasetHints.java
new file mode 100644
index 0000000..0b66c30
--- /dev/null
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/DatasetHints.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.translator;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+
+/**
+ * Collection of hints supported by the create dataset statement.
+ * Includes a method to validate a hint and its associated value
+ * as provided in a create dataset statement.
+ */
+public class DatasetHints {
+
+    /**
+     * Validates the use of a hint.
+     * 
+     * @param hintName
+     *            name of the hint
+     * @param value
+     *            value associated with the hint
+     * @return a Pair whose first element is a boolean representing the validation result
+     *         and whose second element is the error message if the validation result is
+     *         false
+     */
+    public static Pair<Boolean, String> validate(String hintName, String value) {
+        for (IHint h : hints) {
+            if (h.getName().equalsIgnoreCase(hintName.trim())) {
+                return h.validateValue(value);
+            }
+        }
+        return new Pair<Boolean, String>(false, "Unknown hint: " + hintName);
+    }
+
+    private static Set<IHint> hints = initHints();
+
+    private static Set<IHint> initHints() {
+        Set<IHint> hints = new HashSet<IHint>();
+        hints.add(new DatasetCardinalityHint());
+        return hints;
+    }
+
+    /**
+     * Hint representing the expected number of tuples in the dataset.
+     */
+    public static class DatasetCardinalityHint implements IHint {
+        public static final String NAME = "CARDINALITY";
+
+        public static final long DEFAULT = 1000000L;
+
+        @Override
+        public String getName() {
+            return NAME;
+        }
+
+        @Override
+        public Pair<Boolean, String> validateValue(String value) {
+            boolean valid = true;
+            long longValue;
+            try {
+                longValue = Long.parseLong(value);
+                if (longValue < 0) {
+                    return new Pair<Boolean, String>(false, "Value must be >= 0");
+                }
+            } catch (NumberFormatException nfe) {
+                valid = false;
+                return new Pair<Boolean, String>(valid, "Inappropriate value");
+            }
+            return new Pair<Boolean, String>(true, null);
+        }
+
+    }
+}
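
As a quick reference, a minimal usage sketch of the new DatasetHints.validate entry point (the example class and values are hypothetical; the Pair fields first/second mirror how AbstractAqlTranslator consumes the result above):

    import edu.uci.ics.asterix.translator.DatasetHints;
    import edu.uci.ics.hyracks.algebricks.common.utils.Pair;

    public class DatasetHintsUsageSketch {
        public static void main(String[] args) {
            // CARDINALITY accepts any non-negative long value.
            Pair<Boolean, String> ok = DatasetHints.validate("CARDINALITY", "1000000");
            System.out.println(ok.first);                        // true

            // A non-numeric value fails validation and carries an error message.
            Pair<Boolean, String> bad = DatasetHints.validate("CARDINALITY", "many");
            System.out.println(bad.first + " / " + bad.second);

            // An unrecognized hint name is also rejected.
            Pair<Boolean, String> unknown = DatasetHints.validate("RANDOM_HINT", "1");
            System.out.println(unknown.first + " / " + unknown.second);
        }
    }
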
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/IHint.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/IHint.java
new file mode 100644
index 0000000..1c9fd7b
--- /dev/null
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/IHint.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.translator;
+
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+
+/**
+ * Represents a hint provided as part of an AQL statement.
+ */
+public interface IHint {
+
+    /**
+     * Retrieves the name of the hint.
+     * 
+     * @return the name of the hint
+     */
+    public String getName();
+
+    /**
+     * Validates the value associated with the hint.
+     * 
+     * @param value
+     *            the value associated with the hint.
+     * @return a Pair whose first element is a boolean representing the validation result
+     *         and whose second element is the error message if the validation result is
+     *         false
+     */
+    public Pair<Boolean, String> validateValue(String value);
+
+}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TypeTranslator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TypeTranslator.java
index 4047d9a..5abbd91 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TypeTranslator.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TypeTranslator.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -12,6 +12,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package edu.uci.ics.asterix.translator;
 
 import java.util.ArrayList;
@@ -29,6 +30,7 @@
 import edu.uci.ics.asterix.aql.expression.UnorderedListTypeDefinition;
 import edu.uci.ics.asterix.common.annotations.IRecordFieldDataGen;
 import edu.uci.ics.asterix.common.annotations.RecordDataGenAnnotation;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.metadata.MetadataException;
 import edu.uci.ics.asterix.metadata.MetadataManager;
 import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
@@ -89,41 +91,45 @@
             throw new AlgebricksException("Cannot redefine builtin type " + tdname + " .");
         }
         TypeSignature typeSignature = new TypeSignature(typeDataverse, tdname);
-        switch (texpr.getTypeKind()) {
-            case TYPEREFERENCE: {
-                TypeReferenceExpression tre = (TypeReferenceExpression) texpr;
-                IAType t = solveTypeReference(typeSignature, typeMap);
-                if (t != null) {
-                    typeMap.put(typeSignature, t);
-                } else {
-                    addIncompleteTopLevelTypeReference(tdname, tre, incompleteTopLevelTypeReferences, typeDataverse);
+        try {
+            switch (texpr.getTypeKind()) {
+                case TYPEREFERENCE: {
+                    TypeReferenceExpression tre = (TypeReferenceExpression) texpr;
+                    IAType t = solveTypeReference(typeSignature, typeMap);
+                    if (t != null) {
+                        typeMap.put(typeSignature, t);
+                    } else {
+                        addIncompleteTopLevelTypeReference(tdname, tre, incompleteTopLevelTypeReferences, typeDataverse);
+                    }
+                    break;
                 }
-                break;
+                case RECORD: {
+                    RecordTypeDefinition rtd = (RecordTypeDefinition) texpr;
+                    ARecordType recType = computeRecordType(typeSignature, rtd, typeMap, incompleteFieldTypes,
+                            incompleteItemTypes, typeDataverse);
+                    typeMap.put(typeSignature, recType);
+                    break;
+                }
+                case ORDEREDLIST: {
+                    OrderedListTypeDefinition oltd = (OrderedListTypeDefinition) texpr;
+                    AOrderedListType olType = computeOrderedListType(typeSignature, oltd, typeMap, incompleteItemTypes,
+                            incompleteFieldTypes, typeDataverse);
+                    typeMap.put(typeSignature, olType);
+                    break;
+                }
+                case UNORDEREDLIST: {
+                    UnorderedListTypeDefinition ultd = (UnorderedListTypeDefinition) texpr;
+                    AUnorderedListType ulType = computeUnorderedListType(typeSignature, ultd, typeMap,
+                            incompleteItemTypes, incompleteFieldTypes, typeDataverse);
+                    typeMap.put(typeSignature, ulType);
+                    break;
+                }
+                default: {
+                    throw new IllegalStateException();
+                }
             }
-            case RECORD: {
-                RecordTypeDefinition rtd = (RecordTypeDefinition) texpr;
-                ARecordType recType = computeRecordType(typeSignature, rtd, typeMap, incompleteFieldTypes,
-                        incompleteItemTypes, typeDataverse);
-                typeMap.put(typeSignature, recType);
-                break;
-            }
-            case ORDEREDLIST: {
-                OrderedListTypeDefinition oltd = (OrderedListTypeDefinition) texpr;
-                AOrderedListType olType = computeOrderedListType(typeSignature, oltd, typeMap, incompleteItemTypes,
-                        incompleteFieldTypes, typeDataverse);
-                typeMap.put(typeSignature, olType);
-                break;
-            }
-            case UNORDEREDLIST: {
-                UnorderedListTypeDefinition ultd = (UnorderedListTypeDefinition) texpr;
-                AUnorderedListType ulType = computeUnorderedListType(typeSignature, ultd, typeMap, incompleteItemTypes,
-                        incompleteFieldTypes, typeDataverse);
-                typeMap.put(typeSignature, ulType);
-                break;
-            }
-            default: {
-                throw new IllegalStateException();
-            }
+        } catch (AsterixException e) {
+            throw new AlgebricksException(e);
         }
     }
 
@@ -181,7 +187,7 @@
                 }
                 t = dt.getDatatype();
             } else {
-                t = typeMap.get(typeSignature);   
+                t = typeMap.get(typeSignature);
             }
             for (AbstractCollectionType act : incompleteItemTypes.get(typeSignature)) {
                 act.setItemType(t);
@@ -191,7 +197,8 @@
 
     private static AOrderedListType computeOrderedListType(TypeSignature typeSignature, OrderedListTypeDefinition oltd,
             Map<TypeSignature, IAType> typeMap, Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
-            Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, String defaultDataverse) {
+            Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, String defaultDataverse)
+            throws AsterixException {
         TypeExpression tExpr = oltd.getItemTypeExpression();
         String typeName = typeSignature != null ? typeSignature.getName() : null;
         AOrderedListType aolt = new AOrderedListType(null, typeName);
@@ -202,7 +209,8 @@
     private static AUnorderedListType computeUnorderedListType(TypeSignature typeSignature,
             UnorderedListTypeDefinition ultd, Map<TypeSignature, IAType> typeMap,
             Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
-            Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, String defaulDataverse) {
+            Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, String defaulDataverse)
+            throws AsterixException {
         TypeExpression tExpr = ultd.getItemTypeExpression();
         String typeName = typeSignature != null ? typeSignature.getName() : null;
         AUnorderedListType ault = new AUnorderedListType(null, typeName);
@@ -213,7 +221,7 @@
     private static void setCollectionItemType(TypeExpression tExpr, Map<TypeSignature, IAType> typeMap,
             Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
             Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, AbstractCollectionType act,
-            String defaultDataverse) {
+            String defaultDataverse) throws AsterixException {
         switch (tExpr.getTypeKind()) {
             case ORDEREDLIST: {
                 OrderedListTypeDefinition oltd = (OrderedListTypeDefinition) tExpr;
@@ -306,7 +314,8 @@
 
     private static ARecordType computeRecordType(TypeSignature typeSignature, RecordTypeDefinition rtd,
             Map<TypeSignature, IAType> typeMap, Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes,
-            Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes, String defaultDataverse) {
+            Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes, String defaultDataverse)
+            throws AsterixException {
         List<String> names = rtd.getFieldNames();
         int n = names.size();
         String[] fldNames = new String[n];
@@ -318,14 +327,14 @@
         boolean isOpen = rtd.getRecordKind() == RecordKind.OPEN;
         ARecordType recType = new ARecordType(typeSignature == null ? null : typeSignature.getName(), fldNames,
                 fldTypes, isOpen);
-        
+
         List<IRecordFieldDataGen> fieldDataGen = rtd.getFieldDataGen();
         if (fieldDataGen.size() == n) {
             IRecordFieldDataGen[] rfdg = new IRecordFieldDataGen[n];
             rfdg = fieldDataGen.toArray(rfdg);
             recType.getAnnotations().add(new RecordDataGenAnnotation(rfdg, rtd.getUndeclaredFieldsDataGen()));
         }
-        
+
         for (int j = 0; j < n; j++) {
             TypeExpression texpr = rtd.getFieldTypes().get(j);
             switch (texpr.getTypeKind()) {
diff --git a/asterix-app/data/temporal/temporalData.json b/asterix-app/data/temporal/temporalData.json
new file mode 100644
index 0000000..520fd4b
--- /dev/null
+++ b/asterix-app/data/temporal/temporalData.json
@@ -0,0 +1,3 @@
+{"id": "001", "dateField": date("-2012-12-12"), "dateFieldPlus": date("0990-01-01"), "timeField": time("23:49:12.39Z"), "timeFieldPlus": time("03:23:12.2"), "datetimeField": datetime("2012-12-12T00:00:00.001"), "datetimeFieldPlus": datetime("-00130810T221015398"), "durationField": duration("P20Y19DT3H74M23.34S"), "durationFieldPlus": duration("-P2MT4M300.68S"), "intervalField": dtinterval("2012-12-12T00:00:00.001,20130810T221015398") }
+{"id": "002", "datetimeField": datetime("19201220T232918478") }
+{"id": "003", "intervalPlus": tinterval("19:23:32.328Z,23:20:20") }
\ No newline at end of file
diff --git a/asterix-app/data/temporal/temporalData.txt b/asterix-app/data/temporal/temporalData.txt
new file mode 100644
index 0000000..9ce94f5
--- /dev/null
+++ b/asterix-app/data/temporal/temporalData.txt
@@ -0,0 +1,4 @@
+001|-2012-12-12|23:49:12.39Z|3827-12-12T11:43:29.329|P20Y19DT3H74M23.34S
+002|1993-12-12|03:32:00|-2012-12-12T05:00:23.071|P20Y19D
+003|1839-03-12|12:30:49.382|1012-06-12T00:37:00|PT3H74M23.34S
+999|0003-11-02|23:19:32.382Z|2012-12-12T00:00:00.001|P20YT300H9.34S
diff --git a/asterix-app/pom.xml b/asterix-app/pom.xml
index 7c4c484..5af59fd 100644
--- a/asterix-app/pom.xml
+++ b/asterix-app/pom.xml
@@ -17,8 +17,8 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
 				</configuration>
 			</plugin>
 			<plugin>
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/ADMCursor.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/ADMCursor.java
deleted file mode 100644
index cad8760..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/ADMCursor.java
+++ /dev/null
@@ -1,397 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.client;
-
-import edu.uci.ics.asterix.api.aqlj.common.AQLJException;
-import edu.uci.ics.asterix.om.base.ABinary;
-import edu.uci.ics.asterix.om.base.ABitArray;
-import edu.uci.ics.asterix.om.base.ABoolean;
-import edu.uci.ics.asterix.om.base.ACircle;
-import edu.uci.ics.asterix.om.base.ADate;
-import edu.uci.ics.asterix.om.base.ADateTime;
-import edu.uci.ics.asterix.om.base.ADouble;
-import edu.uci.ics.asterix.om.base.ADuration;
-import edu.uci.ics.asterix.om.base.AFloat;
-import edu.uci.ics.asterix.om.base.AInt16;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.AInt64;
-import edu.uci.ics.asterix.om.base.AInt8;
-import edu.uci.ics.asterix.om.base.ALine;
-import edu.uci.ics.asterix.om.base.APoint;
-import edu.uci.ics.asterix.om.base.APoint3D;
-import edu.uci.ics.asterix.om.base.APolygon;
-import edu.uci.ics.asterix.om.base.ARecord;
-import edu.uci.ics.asterix.om.base.ARectangle;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.base.ATime;
-import edu.uci.ics.asterix.om.base.IACollection;
-import edu.uci.ics.asterix.om.base.IACursor;
-import edu.uci.ics.asterix.om.base.IAObject;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.AbstractCollectionType;
-import edu.uci.ics.asterix.om.types.IAType;
-
-/**
- * This class is the implementation of IADMCursor. This class supports iterating
- * over all objects in ASTERIX. All ASTERIX objects can be iterated over and
- * returned via the associated get<Type>() call.
- * 
- * @author zheilbron
- */
-public class ADMCursor implements IADMCursor {
-    protected IAObject currentObject;
-    protected IACursor collectionCursor;
-    private boolean readOnce;
-
-    public ADMCursor(IAObject currentObject) {
-        setCurrentObject(currentObject);
-    }
-
-    public boolean next() throws AQLJException {
-        if (collectionCursor != null) {
-            boolean next = collectionCursor.next();
-            if (next) {
-                currentObject = collectionCursor.get();
-            }
-            return next;
-        } else if (currentObject == null) {
-            return false;
-        } else {
-            if (!readOnce) {
-                readOnce = true;
-                return true;
-            }
-        }
-        return false;
-    }
-
-    @Override
-    public void position(IADMCursor c) throws AQLJException {
-        ((ADMCursor) c).setCurrentObject(currentObject);
-    }
-
-    @Override
-    public void position(IADMCursor c, String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        ((ADMCursor) c).setCurrentObject(o);
-    }
-
-    private IAObject getObjectByField(String field) throws AQLJException {
-        ATypeTag tag = currentObject.getType().getTypeTag();
-        if (tag != ATypeTag.RECORD) {
-            throw new AQLJException("object of type " + tag + " has no fields");
-        }
-        ARecord curRecord = (ARecord) currentObject;
-        ARecordType t = curRecord.getType();
-        int idx = t.findFieldPosition(field);
-        if (idx == -1) {
-            return null;
-        }
-        IAObject o = curRecord.getValueByPos(idx);
-        return o;
-    }
-
-    public void setCurrentObject(IAObject o) {
-        readOnce = false;
-        currentObject = o;
-        if (currentObject != null) {
-            if (currentObject.getType() instanceof AbstractCollectionType) {
-                collectionCursor = ((IACollection) currentObject).getCursor();
-            }
-        }
-    }
-
-    private void checkTypeTag(IAObject o, ATypeTag expectedTag) throws AQLJException {
-        ATypeTag actualTag;
-        actualTag = o.getType().getTypeTag();
-
-        if (actualTag != expectedTag) {
-            throw new AQLJException("cannot get " + expectedTag + " when type is " + actualTag);
-        }
-    }
-
-    @Override
-    public ABinary getBinary() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.BINARY);
-        return ((ABinary) currentObject);
-    }
-
-    @Override
-    public ABinary getBinary(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.BINARY);
-        return (ABinary) o;
-    }
-
-    @Override
-    public ABitArray getBitArray() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.BITARRAY);
-        return ((ABitArray) currentObject);
-    }
-
-    @Override
-    public ABitArray getBitArray(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.BITARRAY);
-        return (ABitArray) o;
-    }
-
-    @Override
-    public ABoolean getBoolean() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.BOOLEAN);
-        return ((ABoolean) currentObject);
-    }
-
-    @Override
-    public ABoolean getBoolean(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.BOOLEAN);
-        return (ABoolean) o;
-    }
-
-    @Override
-    public ACircle getCircle() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.CIRCLE);
-        return ((ACircle) currentObject);
-    }
-
-    @Override
-    public ACircle getCircle(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.CIRCLE);
-        return (ACircle) o;
-    }
-
-    @Override
-    public ADate getDate() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.DATE);
-        return ((ADate) currentObject);
-    }
-
-    @Override
-    public ADate getDate(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.DATE);
-        return (ADate) o;
-    }
-
-    @Override
-    public ADateTime getDateTime() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.DATETIME);
-        return ((ADateTime) currentObject);
-    }
-
-    @Override
-    public ADateTime getDateTime(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.DATETIME);
-        return (ADateTime) o;
-    }
-
-    @Override
-    public ADouble getDouble() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.DOUBLE);
-        return ((ADouble) currentObject);
-    }
-
-    @Override
-    public ADouble getDouble(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.DOUBLE);
-        return (ADouble) o;
-    }
-
-    @Override
-    public ADuration getDuration() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.DURATION);
-        return ((ADuration) currentObject);
-    }
-
-    @Override
-    public ADuration getDuration(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.DURATION);
-        return (ADuration) o;
-    }
-
-    @Override
-    public AFloat getFloat() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.FLOAT);
-        return ((AFloat) currentObject);
-    }
-
-    @Override
-    public AFloat getFloat(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.FLOAT);
-        return (AFloat) o;
-    }
-
-    @Override
-    public AInt8 getInt8() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.INT8);
-        return ((AInt8) currentObject);
-    }
-
-    @Override
-    public AInt8 getInt8(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.INT8);
-        return (AInt8) o;
-    }
-
-    @Override
-    public AInt16 getInt16() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.INT16);
-        return ((AInt16) currentObject);
-    }
-
-    @Override
-    public AInt16 getInt16(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.INT16);
-        return (AInt16) o;
-    }
-
-    @Override
-    public AInt32 getInt32() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.INT32);
-        return ((AInt32) currentObject);
-    }
-
-    @Override
-    public AInt32 getInt32(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.INT32);
-        return (AInt32) o;
-    }
-
-    @Override
-    public AInt64 getInt64() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.INT64);
-        return ((AInt64) currentObject);
-    }
-
-    @Override
-    public AInt64 getInt64(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.INT64);
-        return (AInt64) o;
-    }
-
-    @Override
-    public ALine getLine() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.LINE);
-        return ((ALine) currentObject);
-    }
-
-    @Override
-    public ALine getLine(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.LINE);
-        return (ALine) o;
-    }
-
-    @Override
-    public APoint getPoint() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.POINT);
-        return ((APoint) currentObject);
-    }
-
-    @Override
-    public APoint getPoint(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.POINT);
-        return (APoint) o;
-    }
-
-    @Override
-    public APoint3D getPoint3D() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.POINT3D);
-        return ((APoint3D) currentObject);
-    }
-
-    @Override
-    public APoint3D getPoint3D(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.POINT3D);
-        return (APoint3D) o;
-    }
-
-    @Override
-    public APolygon getPolygon() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.POLYGON);
-        return ((APolygon) currentObject);
-    }
-
-    @Override
-    public APolygon getPolygon(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.POLYGON);
-        return (APolygon) o;
-    }
-
-    @Override
-    public ARectangle getRectangle() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.RECTANGLE);
-        return ((ARectangle) currentObject);
-    }
-
-    @Override
-    public ARectangle getRectangle(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.RECTANGLE);
-        return (ARectangle) o;
-    }
-
-    @Override
-    public AString getString() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.STRING);
-        return ((AString) currentObject);
-    }
-
-    @Override
-    public AString getString(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.STRING);
-        return (AString) o;
-    }
-
-    @Override
-    public ATime getTime() throws AQLJException {
-        checkTypeTag(currentObject, ATypeTag.TIME);
-        return ((ATime) currentObject);
-    }
-
-    @Override
-    public ATime getTime(String field) throws AQLJException {
-        IAObject o = getObjectByField(field);
-        checkTypeTag(o, ATypeTag.TIME);
-        return (ATime) o;
-    }
-
-    public IAType getType() {
-        if (currentObject != null) {
-            return currentObject.getType();
-        }
-        return null;
-    }
-
-    @Override
-    public IAObject get() {
-        return currentObject;
-    }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/AQLJClientDriver.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/AQLJClientDriver.java
deleted file mode 100644
index a1a077b..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/AQLJClientDriver.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.client;
-
-import edu.uci.ics.asterix.api.aqlj.common.AQLJException;
-
-/**
- * This class encapsulates the mechanism for creating a connection to an ASTERIX
- * server.
- * 
- * @author zheilbron
- */
-public class AQLJClientDriver {
-    /**
-     * Get a connection to the ASTERIX server.
-     * 
-     * @param host
-     *            the ip or hostname of the ASTERIX server
-     * @param port
-     *            the port of the ASTERIX server (default: 14600)
-     * @param dataverse
-     *            the name of the dataverse to use for any AQL statements
-     * @return an IAQLJConnection object representing the connection to ASTERIX
-     * @throws AQLJException
-     */
-    public static IAQLJConnection getConnection(String host, int port, String dataverse) throws AQLJException {
-        return new AQLJConnection(host, port, dataverse);
-    }
-}
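For context on the client API being removed here, a minimal usage sketch follows. It uses only classes deleted in this change (AQLJClientDriver, IAQLJConnection, IAQLJResult); the host, dataverse, and query string are illustrative placeholders, and 14600 is the default port mentioned in the removed javadoc.

    // Hypothetical driver for the removed AQLJ client API; values are placeholders.
    import java.io.IOException;

    import edu.uci.ics.asterix.api.aqlj.client.AQLJClientDriver;
    import edu.uci.ics.asterix.api.aqlj.client.IAQLJConnection;
    import edu.uci.ics.asterix.api.aqlj.client.IAQLJResult;
    import edu.uci.ics.asterix.api.aqlj.common.AQLJException;

    public class AqljUsageSketch {
        public static void main(String[] args) throws AQLJException, IOException {
            IAQLJConnection conn = AQLJClientDriver.getConnection("localhost", 14600, "Metadata");
            IAQLJResult res = conn.execute("for $d in dataset('Dataset') return $d;");
            while (res.next()) {
                System.out.println(res.get()); // current IAObject; typed getters also available
            }
            res.close();  // releases the temp-file-backed ResultBuffer
            conn.close(); // closes the underlying AQLJ socket stream
        }
    }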
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/AQLJConnection.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/AQLJConnection.java
deleted file mode 100644
index c597450..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/AQLJConnection.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.client;
-
-import java.io.IOException;
-
-import edu.uci.ics.asterix.api.aqlj.common.AQLJException;
-import edu.uci.ics.asterix.api.aqlj.common.AQLJProtocol;
-import edu.uci.ics.asterix.api.aqlj.common.AQLJStream;
-
-/**
- * This class is the implementation of IAQLJConnection and is the means for
- * communication between a client and an ASTERIX server. The messages passed
- * through this connection conform to the AQLJ protocol.
- * 
- * @author zheilbron
- */
-public class AQLJConnection implements IAQLJConnection {
-    private final String dataverse;
-    private final AQLJStream aqljStream;
-
-    public AQLJConnection(String host, int port, String dataverse) throws AQLJException {
-        this.dataverse = dataverse;
-
-        try {
-            aqljStream = new AQLJStream(host, port);
-        } catch (IOException e) {
-            throw new AQLJException("Could not connect to " + host + ":" + port);
-        }
-
-        startup();
-    }
-
-    private void startup() throws AQLJException {
-        sendStartupMessage(dataverse);
-        getStartupResponse();
-    }
-
-    private void sendStartupMessage(String dataverse) throws AQLJException {
-        try {
-            byte[] dvBytes = dataverse.getBytes("UTF-8");
-            // 4 for the message length, 1 for the message type, 2 for the
-            // string length
-            aqljStream.sendUnsignedInt32(4 + 1 + 2 + dvBytes.length);
-            aqljStream.sendChar(AQLJProtocol.STARTUP_MESSAGE);
-            aqljStream.sendInt16(dvBytes.length);
-            aqljStream.send(dvBytes);
-            aqljStream.flush();
-        } catch (IOException e) {
-            throw new AQLJException(e);
-        }
-    }
-
-    private void getStartupResponse() throws AQLJException {
-        try {
-            aqljStream.receiveUnsignedInt32();
-            int messageType = aqljStream.receiveChar();
-            switch (messageType) {
-                case AQLJProtocol.READY_MESSAGE:
-                    break;
-                case AQLJProtocol.ERROR_MESSAGE:
-                    String err = aqljStream.receiveString();
-                    throw new AQLJException(err);
-                default:
-                    throw new AQLJException("Error: unable to parse message from server");
-            }
-        } catch (IOException e) {
-            throw new AQLJException(e);
-        }
-    }
-
-    @Override
-    public IAQLJResult execute(String stmt) throws AQLJException {
-        sendExecute(stmt);
-        return fetchResults();
-    }
-
-    private AQLJResult fetchResults() throws AQLJException {
-        long len;
-        int messageType;
-
-        ResultBuffer rb = null;
-        while (true) {
-            try {
-                len = aqljStream.receiveUnsignedInt32();
-                messageType = aqljStream.receiveChar();
-                switch (messageType) {
-                    case AQLJProtocol.DATA_MESSAGE:
-                        // DataRecord
-                        if (rb == null) {
-                            rb = new ResultBuffer();
-                        }
-                        rb.appendMessage(aqljStream, (int) (len - 5));
-                        break;
-                    case AQLJProtocol.EXECUTE_COMPLETE_MESSAGE:
-                        // ExecuteComplete
-                        return new AQLJResult(rb);
-                    case AQLJProtocol.ERROR_MESSAGE:
-                        // Error
-                        throw new AQLJException(aqljStream.receiveString());
-                    default:
-                        throw new AQLJException("Error: received unknown message type from server");
-                }
-            } catch (IOException e) {
-                throw new AQLJException(e);
-            }
-        }
-
-    }
-
-    private void sendExecute(String stmt) throws AQLJException {
-        try {
-            byte[] stmtBytes = stmt.getBytes("UTF-8");
-            // 4 for the message length, 1 for the message type, 2 for the
-            // string length
-            aqljStream.sendUnsignedInt32(4 + 1 + 2 + stmtBytes.length);
-            aqljStream.sendChar(AQLJProtocol.EXECUTE_MESSAGE);
-            aqljStream.sendInt16(stmtBytes.length);
-            aqljStream.send(stmtBytes);
-            aqljStream.flush();
-        } catch (IOException e) {
-            throw new AQLJException(e);
-        }
-    }
-
-    @Override
-    public void close() throws IOException {
-        aqljStream.close();
-    }
-
-    @Override
-    public IADMCursor createADMCursor() {
-        return new ADMCursor(null);
-    }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/AQLJResult.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/AQLJResult.java
deleted file mode 100644
index 63114ce..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/AQLJResult.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.client;
-
-import java.io.IOException;
-
-import edu.uci.ics.asterix.api.aqlj.common.AQLJException;
-
-/**
- * This class is a special type of ADMCursor in that it has a result buffer
- * associated with it. It can be thought of as the "base" cursor for some ADM
- * results.
- * 
- * @author zheilbron
- */
-public class AQLJResult extends ADMCursor implements IAQLJResult {
-    private final ResultBuffer resultBuffer;
-
-    public AQLJResult(ResultBuffer buffer) {
-        super(null);
-        this.resultBuffer = buffer;
-    }
-
-    @Override
-    public boolean next() throws AQLJException {
-        currentObject = resultBuffer.get();
-        if (currentObject == null) {
-            return false;
-        }
-        return true;
-    }
-
-    public void close() throws IOException {
-        resultBuffer.close();
-    }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/IADMCursor.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/IADMCursor.java
deleted file mode 100644
index a7500c9..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/IADMCursor.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.client;
-
-import edu.uci.ics.asterix.api.aqlj.common.AQLJException;
-import edu.uci.ics.asterix.om.base.ABinary;
-import edu.uci.ics.asterix.om.base.ABitArray;
-import edu.uci.ics.asterix.om.base.ABoolean;
-import edu.uci.ics.asterix.om.base.ACircle;
-import edu.uci.ics.asterix.om.base.ADate;
-import edu.uci.ics.asterix.om.base.ADateTime;
-import edu.uci.ics.asterix.om.base.ADouble;
-import edu.uci.ics.asterix.om.base.ADuration;
-import edu.uci.ics.asterix.om.base.AFloat;
-import edu.uci.ics.asterix.om.base.AInt16;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.AInt64;
-import edu.uci.ics.asterix.om.base.AInt8;
-import edu.uci.ics.asterix.om.base.ALine;
-import edu.uci.ics.asterix.om.base.APoint;
-import edu.uci.ics.asterix.om.base.APoint3D;
-import edu.uci.ics.asterix.om.base.APolygon;
-import edu.uci.ics.asterix.om.base.ARectangle;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.base.ATime;
-import edu.uci.ics.asterix.om.base.IAObject;
-import edu.uci.ics.asterix.om.types.IAType;
-
-/**
- * The mechanism by which results are iterated over. Results from ASTERIX may
- * come in the form of a set of objects which may either be primitives (e.g.
- * int, string, ...), collections (e.g. ordered lists, unordered lists, ...),
- * records, or some combination thereof.
- * 
- * @author zheilbron
- */
-public interface IADMCursor {
-    public ABinary getBinary() throws AQLJException;
-
-    public ABinary getBinary(String field) throws AQLJException;
-
-    public ABitArray getBitArray() throws AQLJException;
-
-    public ABitArray getBitArray(String field) throws AQLJException;
-
-    public ABoolean getBoolean() throws AQLJException;
-
-    public ABoolean getBoolean(String field) throws AQLJException;
-
-    public ACircle getCircle() throws AQLJException;
-
-    public ACircle getCircle(String field) throws AQLJException;
-
-    public ADate getDate() throws AQLJException;
-
-    public ADate getDate(String field) throws AQLJException;
-
-    public ADateTime getDateTime() throws AQLJException;
-
-    public ADateTime getDateTime(String field) throws AQLJException;
-
-    public ADouble getDouble() throws AQLJException;
-
-    public ADouble getDouble(String field) throws AQLJException;
-
-    public ADuration getDuration() throws AQLJException;
-
-    public ADuration getDuration(String field) throws AQLJException;
-
-    public AFloat getFloat() throws AQLJException;
-
-    public AFloat getFloat(String field) throws AQLJException;
-
-    public AInt8 getInt8() throws AQLJException;
-
-    public AInt8 getInt8(String field) throws AQLJException;
-
-    public AInt16 getInt16() throws AQLJException;
-
-    public AInt16 getInt16(String field) throws AQLJException;
-
-    public AInt32 getInt32() throws AQLJException;
-
-    public AInt32 getInt32(String field) throws AQLJException;
-
-    public AInt64 getInt64() throws AQLJException;
-
-    public AInt64 getInt64(String field) throws AQLJException;
-
-    public ALine getLine() throws AQLJException;
-
-    public ALine getLine(String field) throws AQLJException;
-
-    public APoint getPoint() throws AQLJException;
-
-    public APoint getPoint(String field) throws AQLJException;
-
-    public APoint3D getPoint3D() throws AQLJException;
-
-    public APoint3D getPoint3D(String field) throws AQLJException;
-
-    public APolygon getPolygon() throws AQLJException;
-
-    public APolygon getPolygon(String field) throws AQLJException;
-
-    public ARectangle getRectangle() throws AQLJException;
-
-    public ARectangle getRectangle(String field) throws AQLJException;
-
-    public AString getString(String field) throws AQLJException;
-
-    public AString getString() throws AQLJException;
-
-    public ATime getTime() throws AQLJException;
-
-    public ATime getTime(String field) throws AQLJException;
-
-    /**
-     * Advances the cursor to the next object
-     * 
-     * @return true if the cursor points to an object
-     * @throws AQLJException
-     */
-    public boolean next() throws AQLJException;
-
-    /**
-     * Positions the cursor c on the object pointed to by this
-     * 
-     * @param c
-     *            the cursor to position
-     * @throws AQLJException
-     */
-    public void position(IADMCursor c) throws AQLJException;
-
-    /**
-     * Positions the cursor c on the object associated with the given field
-     * 
-     * @param c
-     *            the cursor to position
-     * @param field
-     *            the field name
-     * @throws AQLJException
-     */
-    public void position(IADMCursor c, String field) throws AQLJException;
-
-    /**
-     * Returns the type of the current object being pointed at, which may be
-     * null.
-     * 
-     * @return the type of the current object
-     */
-    public IAType getType();
-
-    /**
-     * Returns the current object being pointed at, which may be null.
-     * 
-     * @return the current object
-     */
-    public IAObject get();
-}
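The IADMCursor javadoc above describes iterating results that may be primitives, collections, or records. Below is a hedged sketch of how the removed cursor API was meant to be driven; the field names ("name", "age", "orders") and the record shape are invented for illustration.

    import edu.uci.ics.asterix.api.aqlj.client.IADMCursor;
    import edu.uci.ics.asterix.api.aqlj.client.IAQLJConnection;
    import edu.uci.ics.asterix.api.aqlj.client.IAQLJResult;
    import edu.uci.ics.asterix.api.aqlj.common.AQLJException;
    import edu.uci.ics.asterix.om.base.AInt32;
    import edu.uci.ics.asterix.om.base.AString;

    public class CursorWalkSketch {
        // Prints two flat fields of each result record and walks one nested field.
        static void dump(IAQLJConnection conn, IAQLJResult result) throws AQLJException {
            IADMCursor nested = conn.createADMCursor();
            while (result.next()) {
                AString name = result.getString("name"); // throws AQLJException if not a STRING
                AInt32 age = result.getInt32("age");
                System.out.println(name + " / " + age);
                result.position(nested, "orders");       // point 'nested' at a field's value
                while (nested.next()) {                   // iterates a collection, or yields a
                    System.out.println(nested.getType()); // single non-collection object once
                }
            }
        }
    }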
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/IAQLJConnection.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/IAQLJConnection.java
deleted file mode 100644
index 8fdf59d..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/IAQLJConnection.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.client;
-
-import java.io.IOException;
-
-import edu.uci.ics.asterix.api.aqlj.common.AQLJException;
-
-/**
- * The connection (session) that serves as the context for communicating with
- * ASTERIX.
- * 
- * @author zheilbron
- */
-public interface IAQLJConnection {
-    /**
-     * Execute an AQL statement that returns an IAQLJResult. The IAQLJResult
-     * will contain all associated results of the AQL statement.
-     * 
-     * @param stmt
-     *            the AQL statement
-     * @return the results of the AQL statement as an IAQLJResult
-     * @throws AQLJException
-     */
-    public IAQLJResult execute(String stmt) throws AQLJException;
-
-    /**
-     * Create a cursor to iterate over results
-     * 
-     * @return an unpositioned cursor
-     */
-    public IADMCursor createADMCursor();
-
-    /**
-     * Close the connection with the server.
-     */
-    public void close() throws IOException;
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/IAQLJResult.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/IAQLJResult.java
deleted file mode 100644
index b28b3c6..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/IAQLJResult.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.client;
-
-import java.io.IOException;
-
-/**
- * The results associated with an AQL statement.
- * 
- * @author zheilbron
- */
-public interface IAQLJResult extends IADMCursor {
-    /**
-     * Close the cursor and discard any associated results.
-     * It's important to ensure that this method is called in order to free up
-     * the associated result buffer.
-     */
-    public void close() throws IOException;
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/ResultBuffer.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/ResultBuffer.java
deleted file mode 100644
index 9fddad5..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/ResultBuffer.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.client;
-
-import java.io.DataInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.ObjectInputStream;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.api.aqlj.common.AQLJException;
-import edu.uci.ics.asterix.api.aqlj.common.AQLJStream;
-import edu.uci.ics.asterix.om.base.IAObject;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-/**
- * This class supports the buffering of results that are received from the
- * server. The results are buffered completely to a file on disk. The results
- * that are sent back should contain a serde in order to read the results back
- * in. To see the expected format refer to {@link edu.uci.ics.algebricks.runtime.hyracks.writers.SerializedDataWriterFactory} .
- * 
- * @author zheilbron
- */
-public class ResultBuffer {
-    private static final Logger LOGGER = Logger.getLogger(ResultBuffer.class.getName());
-
-    private static final int BUF_SIZE = 8192;
-
-    private final byte[] buffer;
-    private final File tmpFile;
-    private final FileOutputStream fos;
-    private final FileInputStream fis;
-    private final DataInputStream dis;
-
-    private ObjectInputStream ois;
-    private ISerializerDeserializer serde;
-
-    public ResultBuffer() throws IOException {
-        buffer = new byte[BUF_SIZE];
-        tmpFile = File.createTempFile("aqlj", null, new File(System.getProperty("java.io.tmpdir")));
-        fos = new FileOutputStream(tmpFile);
-        fis = new FileInputStream(tmpFile);
-        dis = new DataInputStream(fis);
-        serde = null;
-    }
-
-    private RecordDescriptor getRecordDescriptor() throws AQLJException {
-        RecordDescriptor rd;
-        try {
-            ois = new ObjectInputStream(fis);
-        } catch (IOException e) {
-            throw new AQLJException(e);
-        }
-        try {
-            rd = (RecordDescriptor) ois.readObject();
-        } catch (IOException e) {
-            throw new AQLJException(e);
-        } catch (ClassNotFoundException e) {
-            throw new AQLJException(e);
-        }
-        return rd;
-    }
-
-    public IAObject get() throws AQLJException {
-        Object o;
-
-        if (serde == null) {
-            serde = getRecordDescriptor().getFields()[0];
-        }
-
-        try {
-            o = serde.deserialize(dis);
-        } catch (HyracksDataException e) {
-            // this is expected behavior: we know we've reached the end
-            // of the
-            // results when an EOFException (masked by the HyracksDataException)
-            // is thrown
-            o = null;
-        }
-
-        return (IAObject) o;
-    }
-
-    public void appendMessage(AQLJStream aqljStream, long len) throws IOException {
-        long pos = 0;
-        long read = 0;
-        long remaining = 0;
-
-        while (pos < len) {
-            remaining = len - pos;
-            read = remaining > BUF_SIZE ? BUF_SIZE : remaining;
-            aqljStream.receive(buffer, 0, (int) read);
-            pos += read;
-            fos.write(buffer, 0, (int) read);
-        }
-    }
-
-    public void close() throws IOException {
-        // remove the file!
-        if (tmpFile.exists()) {
-            tmpFile.delete();
-        }
-        fos.close();
-        fis.close();
-        dis.close();
-        if (ois != null) {
-            ois.close();
-        }
-    }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/common/AQLJProtocol.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/common/AQLJProtocol.java
deleted file mode 100644
index 83ef0e5..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/common/AQLJProtocol.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.common;
-
-/**
- * This class provides constants for message types in the AQLJ protocol.
- * 
- * @author zheilbron
- */
-public abstract class AQLJProtocol {
-    public static final char STARTUP_MESSAGE = 'S';
-    public static final char EXECUTE_MESSAGE = 'X';
-    public static final char READY_MESSAGE = 'R';
-    public static final char ERROR_MESSAGE = 'E';
-    public static final char EXECUTE_COMPLETE_MESSAGE = 'C';
-    public static final char DATA_MESSAGE = 'D';
-    public static final char GET_RESULTS_MESSAGE = 'G';
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/common/AQLJStream.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/common/AQLJStream.java
deleted file mode 100644
index c595284..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/common/AQLJStream.java
+++ /dev/null
@@ -1,169 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.common;
-
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.EOFException;
-import java.io.IOException;
-import java.net.Socket;
-
-/**
- * This class provides a clean mechanism for sending and receiving the data
- * types involved in the AQLJ protocol.
- * 
- * @author zheilbron
- */
-public class AQLJStream {
-    private static final int BUF_SIZE = 8192;
-
-    private final String host;
-    private final int port;
-    private final Socket connection;
-    private final BufferedInputStream aqljInput;
-    private final BufferedOutputStream aqljOutput;
-
-    private final byte[] int16Buf;
-    private final byte[] int32Buf;
-
-    public AQLJStream(String host, int port) throws IOException {
-        this.host = host;
-        this.port = port;
-
-        connection = new Socket(host, port);
-
-        aqljInput = new BufferedInputStream(connection.getInputStream(), BUF_SIZE);
-        aqljOutput = new BufferedOutputStream(connection.getOutputStream(), BUF_SIZE);
-
-        int16Buf = new byte[2];
-        int32Buf = new byte[4];
-    }
-
-    public AQLJStream(Socket sock) throws IOException {
-        this.host = null;
-        this.port = 0;
-
-        this.connection = sock;
-        aqljInput = new BufferedInputStream(connection.getInputStream(), BUF_SIZE);
-        aqljOutput = new BufferedOutputStream(connection.getOutputStream(), BUF_SIZE);
-
-        int16Buf = new byte[2];
-        int32Buf = new byte[4];
-    }
-
-    public String getHost() {
-        return host;
-    }
-
-    public int getPort() {
-        return port;
-    }
-
-    public Socket getSocket() {
-        return connection;
-    }
-
-    public void receive(byte[] buf, int off, int len) throws IOException {
-        int read;
-        int count = 0;
-        while (count < len) {
-            read = aqljInput.read(buf, off + count, len - count);
-            if (read < 0) {
-                throw new EOFException();
-            }
-            count += read;
-        }
-    }
-
-    public byte[] receive(int len) throws IOException {
-        byte[] result = new byte[len];
-        receive(result, 0, len);
-        return result;
-    }
-
-    public int receiveInt16() throws IOException {
-        if (aqljInput.read(int16Buf) != 2) {
-            throw new EOFException();
-        }
-        return (int16Buf[0] & 0xff) << 8 | (int16Buf[1] & 0xff);
-    }
-
-    public long receiveUnsignedInt32() throws IOException {
-        if (aqljInput.read(int32Buf) != 4) {
-            throw new EOFException();
-        }
-        return ((int32Buf[0] & 0xff) << 24 | (int32Buf[1] & 0xff) << 16 | (int32Buf[2] & 0xff) << 8 | (int32Buf[3] & 0xff)) & 0x00000000ffffffffl;
-    }
-
-    public int receiveChar() throws IOException {
-        int c = aqljInput.read();
-        if (c < 0) {
-            throw new EOFException();
-        }
-        return c;
-    }
-
-    public String receiveString() throws IOException {
-        int strlen = receiveInt16();
-        return new String(receive(strlen), "UTF8");
-    }
-
-    public void send(byte[] buf) throws IOException {
-        aqljOutput.write(buf);
-    }
-
-    public void send(byte[] buf, int off, int len) throws IOException {
-        aqljOutput.write(buf, off, len);
-    }
-
-    public void sendInt16(int val) throws IOException {
-        int16Buf[0] = (byte) (val >>> 8);
-        int16Buf[1] = (byte) (val);
-        aqljOutput.write(int16Buf);
-    }
-
-    public void sendUnsignedInt32(long val) throws IOException {
-        int32Buf[0] = (byte) (val >>> 24);
-        int32Buf[1] = (byte) (val >>> 16);
-        int32Buf[2] = (byte) (val >>> 8);
-        int32Buf[3] = (byte) (val);
-        aqljOutput.write(int32Buf);
-    }
-
-    public void sendChar(int c) throws IOException {
-        aqljOutput.write(c);
-    }
-
-    public void sendString(byte[] strBytes) throws IOException {
-        sendInt16(strBytes.length);
-        send(strBytes);
-    }
-
-    public void sendString(String str) throws IOException {
-        byte[] strBytes = str.getBytes("UTF8");
-        sendInt16(strBytes.length);
-        send(strBytes);
-    }
-
-    public void flush() throws IOException {
-        aqljOutput.flush();
-    }
-
-    public void close() throws IOException {
-        aqljInput.close();
-        aqljOutput.close();
-        connection.close();
-    }
-}
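Since AQLJStream only provides the low-level send/receive primitives, a short sketch of how one complete AQLJ frame was assembled from them may help: every message starts with an unsigned 32-bit total length (which counts the 4-byte length field and the 1-byte type code itself), followed by a type character from AQLJProtocol and a type-specific payload, with strings carried as an int16 length plus UTF-8 bytes. The helper class and method names here are illustrative; the framing mirrors the removed AQLJConnection.sendStartupMessage.

    import java.io.IOException;

    import edu.uci.ics.asterix.api.aqlj.common.AQLJProtocol;
    import edu.uci.ics.asterix.api.aqlj.common.AQLJStream;

    public class FramingSketch {
        // Frames one STARTUP message carrying the dataverse name.
        static void sendStartup(AQLJStream out, String dataverse) throws IOException {
            byte[] dv = dataverse.getBytes("UTF-8");
            out.sendUnsignedInt32(4 + 1 + 2 + dv.length); // length + type + int16 + payload
            out.sendChar(AQLJProtocol.STARTUP_MESSAGE);
            out.sendString(dv);                           // int16 length followed by the bytes
            out.flush();
        }
    }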
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/APIClientThread.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/APIClientThread.java
deleted file mode 100644
index 68f6dd2..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/APIClientThread.java
+++ /dev/null
@@ -1,325 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.server;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.io.StringReader;
-import java.net.Socket;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.api.aqlj.common.AQLJException;
-import edu.uci.ics.asterix.api.aqlj.common.AQLJProtocol;
-import edu.uci.ics.asterix.api.aqlj.common.AQLJStream;
-import edu.uci.ics.asterix.api.common.APIFramework.DisplayFormat;
-import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
-import edu.uci.ics.asterix.api.common.SessionConfig;
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.parser.AQLParser;
-import edu.uci.ics.asterix.aql.parser.ParseException;
-import edu.uci.ics.asterix.aql.translator.AqlTranslator;
-import edu.uci.ics.asterix.aql.translator.QueryResult;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.hyracks.bootstrap.AsterixNodeState;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.api.IAsterixStateProxy;
-import edu.uci.ics.asterix.metadata.bootstrap.AsterixProperties;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-
-/**
- * This class is the client handler for the APIServer. The AQLJ protocol is used
- * for communicating with the client. The client, for example, may send a
- * message to execute a set containing one or more AQL statements. It is up to this class to process each
- * AQL statement (in the original order) and pass back the results, if any, to the client.
- * 
- * @author zheilbron
- */
-public class APIClientThread extends Thread {
-    private static final Logger LOGGER = Logger.getLogger(APIClientThread.class.getName());
-
-    private static final int RESULT_BUF_SIZE = 8192;
-
-    private final IHyracksClientConnection hcc;
-    private final ICCApplicationContext appContext;
-    private final AQLJStream clientStream;
-    private final String outputFilePath;
-    private final String outputNodeName;
-    private final String outputNodeIP;
-    private final String binaryOutputClause;
-
-    private AQLJStream nodeDataServerStream;
-    private int nodeDataServerPort;
-    private String dataverse;
-
-    public APIClientThread(IHyracksClientConnection hcc, Socket clientSocket, ICCApplicationContext appCtx)
-            throws IOException {
-        this.hcc = hcc;
-        clientStream = new AQLJStream(clientSocket);
-        this.appContext = appCtx;
-
-        // get the name of the first node controller that we find
-        // all query results will be written to this node
-        Map<String, Set<String>> nodeNameMap = new HashMap<String, Set<String>>();
-        try {
-            this.appContext.getCCContext().getIPAddressNodeMap(nodeNameMap);
-        } catch (Exception e) {
-            throw new IOException(" unable to obtain IP address node map", e);
-        }
-        outputNodeIP = (String) nodeNameMap.keySet().toArray()[0];
-        outputNodeName = (String) nodeNameMap.get(outputNodeIP).toArray()[0];
-
-        // get the port of the node data server that is running on the first nc
-        IAsterixStateProxy proxy = (IAsterixStateProxy) appCtx.getDistributedState();
-        nodeDataServerPort = ((AsterixNodeState) proxy.getAsterixNodeState(outputNodeName)).getAPINodeDataServerPort();
-        nodeDataServerStream = null;
-
-        // write the data into the output stores directory of the nc
-        // if output stores are unavailable (could they ever be?), then write to
-        // tmpdir which can be overridden
-        // Also, use milliseconds in path name of output file to differentiate
-        // queries
-        Map<String, String[]> storesMap = AsterixProperties.INSTANCE.getStores();
-        String[] outputStores = storesMap.get(outputNodeName);
-        if (outputStores.length > 0) {
-            outputFilePath = outputStores[0] + System.currentTimeMillis() + ".adm";
-        } else {
-            outputFilePath = System.getProperty("java.io.tmpdir") + File.pathSeparator + System.currentTimeMillis()
-                    + ".adm";
-        }
-
-        // the "write output..." clause is inserted into incoming AQL statements
-        binaryOutputClause = "write output to " + outputNodeName + ":\"" + outputFilePath
-                + "\" using \"edu.uci.ics.hyracks.algebricks.runtime.writers.SerializedDataWriterFactory\";";
-
-    }
-
-    private void startup() throws IOException {
-        int messageType;
-
-        clientStream.receiveUnsignedInt32();
-        messageType = clientStream.receiveChar();
-        dataverse = clientStream.receiveString();
-        if (messageType == AQLJProtocol.STARTUP_MESSAGE) {
-            // send Ready
-            sendReady();
-        } else {
-            // send Error
-            LOGGER.warning("Error: received message other than Startup. Exiting.");
-            String err = "startup failed: no Startup message received";
-            sendError(err);
-        }
-    }
-
-    public void run() {
-        String outputPath;
-        int messageType;
-
-        try {
-            // startup phase
-            startup();
-
-            // normal execution phase
-            while (true) {
-                // check if we should close
-                if (Thread.interrupted()) {
-                    close();
-                    return;
-                }
-
-                clientStream.receiveUnsignedInt32();
-                messageType = clientStream.receiveChar();
-                switch (messageType) {
-                    case AQLJProtocol.EXECUTE_MESSAGE:
-                        // Execute
-                        String query = clientStream.receiveString();
-                        String fullQuery = "use dataverse " + dataverse + ";\n" + binaryOutputClause + '\n' + query;
-
-                        try {
-                            outputPath = executeStatement(fullQuery);
-                        } catch (AQLJException e) {
-                            LOGGER.severe("Error occurred while executing query: " + fullQuery);
-                            LOGGER.severe(e.getMessage());
-                            sendError(e.getMessage());
-                            break;
-                        }
-
-                        if (outputPath == null) {
-                            // The query ran, but produced no results. This
-                            // means cardinality of the
-                            // result is 0 or "actions" were performed, where
-                            // actions are things like create
-                            // type, create dataset, etc.
-                            sendExecuteComplete();
-                        } else {
-                            // otherwise, there are some results, so send them
-                            // back to the client
-                            if (sendResults(outputPath)) {
-                                sendExecuteComplete();
-                            } else {
-                                String err = "Error: unable to retrieve results from " + outputNodeName;
-                                LOGGER.severe(err);
-                                sendError(err);
-                            }
-                        }
-                        break;
-                    default:
-                        String err = "Error: received unknown message of type " + (char) messageType;
-                        sendError(err);
-                        LOGGER.severe(err);
-                        close();
-                        return;
-                }
-            }
-        } catch (IOException e) {
-            // the normal path that is taken when exiting
-            close();
-            return;
-        }
-    }
-
-    private void close() {
-        try {
-            if (nodeDataServerStream != null) {
-                nodeDataServerStream.close();
-            }
-        } catch (IOException e) {
-            LOGGER.severe("Error closing NodeData AQLJStream");
-            LOGGER.severe(e.getMessage());
-        }
-        try {
-            clientStream.close();
-        } catch (IOException e) {
-            LOGGER.severe("Error closing client AQLJStream");
-            LOGGER.severe(e.getMessage());
-        }
-    }
-
-    private String executeStatement(String stmt) throws IOException, AQLJException {
-        List<QueryResult> executionResults = null;
-        PrintWriter out = new PrintWriter(System.out);
-        try {
-            AQLParser parser = new AQLParser(new StringReader(stmt));
-            List<Statement> statements = parser.Statement();
-            SessionConfig pc = new SessionConfig(AsterixHyracksIntegrationUtil.DEFAULT_HYRACKS_CC_CLIENT_PORT, true,
-                    false, false, false, false, false, true, false);
-
-            MetadataManager.INSTANCE.init();
-            if (statements != null && statements.size() > 0) {
-                AqlTranslator translator = new AqlTranslator(statements, out, pc, DisplayFormat.TEXT);
-                executionResults = translator.compileAndExecute(hcc, false);
-            }
-        } catch (ParseException e) {
-            e.printStackTrace();
-            throw new AQLJException(e);
-        } catch (AsterixException e) {
-            e.printStackTrace();
-            throw new AQLJException(e);
-        } catch (AlgebricksException e) {
-            e.printStackTrace();
-            throw new AQLJException(e);
-        } catch (Exception e) {
-            e.printStackTrace();
-            sendError(e.getMessage());
-        }
-        return null;
-
-    }
-
-    private boolean sendResults(String path) throws IOException {
-        int messageType;
-        long len;
-        int sent;
-        int toSend;
-        byte[] buf = new byte[RESULT_BUF_SIZE];
-
-        if (nodeDataServerStream == null) {
-            nodeDataServerStream = new AQLJStream(outputNodeIP, nodeDataServerPort);
-        }
-        sendGetResults(nodeDataServerStream);
-
-        // forward data packets from the nodedataservers through this server to
-        // the client
-        while (true) {
-            len = nodeDataServerStream.receiveUnsignedInt32();
-            messageType = nodeDataServerStream.receiveChar();
-            switch ((char) messageType) {
-                case AQLJProtocol.DATA_MESSAGE:
-                    clientStream.sendUnsignedInt32(len);
-                    clientStream.sendChar(AQLJProtocol.DATA_MESSAGE);
-                    len -= 5;
-                    sent = 0;
-                    while (sent < len) {
-                        len -= sent;
-                        toSend = (len > buf.length) ? buf.length : (int) len;
-                        nodeDataServerStream.receive(buf, 0, toSend);
-                        clientStream.send(buf, 0, toSend);
-                        sent += toSend;
-                    }
-                    clientStream.flush();
-                    break;
-                case AQLJProtocol.EXECUTE_COMPLETE_MESSAGE:
-                    nodeDataServerStream.close();
-                    nodeDataServerStream = null;
-                    return true;
-                default:
-                    nodeDataServerStream.close();
-                    nodeDataServerStream = null;
-                    return false;
-            }
-        }
-    }
-
-    private void sendGetResults(AQLJStream s) throws IOException {
-        byte[] pathBytes = outputFilePath.getBytes("UTF-8");
-        // 4 for the message length, 1 for the message type, 2 for the string
-        // length
-        s.sendUnsignedInt32(4 + 1 + 2 + pathBytes.length);
-        s.sendChar(AQLJProtocol.GET_RESULTS_MESSAGE);
-        s.sendString(outputFilePath);
-        s.flush();
-    }
-
-    private void sendReady() throws IOException {
-        // 4 for the message length and 1 for the message type (4 + 1 = 5)
-        clientStream.sendUnsignedInt32(5);
-        clientStream.sendChar(AQLJProtocol.READY_MESSAGE);
-        clientStream.flush();
-    }
-
-    private void sendError(String msg) throws IOException {
-        byte[] msgBytes = msg.getBytes("UTF-8");
-        // 4 for the message length, 1 for the message type, 2 for the string
-        // length
-        clientStream.sendUnsignedInt32(4 + 1 + 2 + msgBytes.length);
-        clientStream.sendChar(AQLJProtocol.ERROR_MESSAGE);
-        clientStream.sendInt16(msgBytes.length);
-        clientStream.send(msgBytes);
-        clientStream.flush();
-    }
-
-    private void sendExecuteComplete() throws IOException {
-        // 4 for the message length and 1 for the message type (4 + 1 = 5)
-        clientStream.sendUnsignedInt32(5);
-        clientStream.sendChar(AQLJProtocol.EXECUTE_COMPLETE_MESSAGE);
-        clientStream.flush();
-    }
-}
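The handler above rewrites each incoming statement before executing it. Isolated from the rest of the class, the rewrite step amounts to the following sketch; the class and method names are illustrative, while the clause strings are taken from the removed code.

    public class StatementRewriteSketch {
        // Prefix the session dataverse and redirect output to a binary file on the chosen NC,
        // mirroring the constructor and run() logic of the removed APIClientThread.
        static String rewrite(String dataverse, String outputNodeName, String outputFilePath, String query) {
            String binaryOutputClause = "write output to " + outputNodeName + ":\"" + outputFilePath
                    + "\" using \"edu.uci.ics.hyracks.algebricks.runtime.writers.SerializedDataWriterFactory\";";
            return "use dataverse " + dataverse + ";\n" + binaryOutputClause + '\n' + query;
        }
    }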
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/APIClientThreadFactory.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/APIClientThreadFactory.java
deleted file mode 100644
index 7f35af6..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/APIClientThreadFactory.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.server;
-
-import java.io.IOException;
-import java.net.Socket;
-
-import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
-import edu.uci.ics.hyracks.api.client.HyracksConnection;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-
-/**
- * This class is a factory for client handler threads of type {@link APIClientThread} and is used in conjunction with {@link ThreadedServer}.
- * 
- * @author zheilbron
- */
-public class APIClientThreadFactory implements IClientThreadFactory {
-    private final ICCApplicationContext appContext;
-
-    private IHyracksClientConnection hcc;
-
-    public APIClientThreadFactory(ICCApplicationContext appContext) throws Exception {
-        this.appContext = appContext;
-        hcc = new HyracksConnection(appContext.getCCContext().getClusterControllerInfo().getClientNetAddress(), appContext.getCCContext().getClusterControllerInfo()
-                .getClientNetPort());
-    }
-
-    @Override
-    public Thread createThread(Socket socket) throws IOException {
-        return new APIClientThread(hcc, socket, appContext);
-    }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/IClientThreadFactory.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/IClientThreadFactory.java
deleted file mode 100644
index bca7f4d..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/IClientThreadFactory.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.server;
-
-import java.io.IOException;
-import java.net.Socket;
-
-/**
- * Implementing this interface allows a class such as {@link ThreadedServer} to
- * spawn a particular type of thread to handle some client connection.
- * 
- * @author zheilbron
- */
-public interface IClientThreadFactory {
-    public Thread createThread(Socket socket) throws IOException;
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/NodeDataClientThread.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/NodeDataClientThread.java
deleted file mode 100644
index 0246fd9..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/NodeDataClientThread.java
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.server;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.net.Socket;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.api.aqlj.common.AQLJProtocol;
-import edu.uci.ics.asterix.api.aqlj.common.AQLJStream;
-
-/**
- * This class handles data requests from the APIServer. When a query is executed
- * through the API, the output is written to the local disk of some NC. The
- * APIServer will contact that NC and ask for the results of the query to be
- * sent. This class handles such communication between the NC and APIServer.
- * 
- * @author zheilbron
- */
-public class NodeDataClientThread extends Thread {
-    private static final Logger LOGGER = Logger.getLogger(NodeDataClientThread.class.getName());
-
-    private static final int RESULT_BUFFER_SIZE = 8192;
-
-    private final AQLJStream aqljStream;
-
-    public NodeDataClientThread(Socket clientSocket) throws IOException {
-        aqljStream = new AQLJStream(clientSocket);
-    }
-
-    public void run() {
-        try {
-            getFile();
-        } catch (IOException e) {
-            LOGGER.severe("I/O error occurred over AQLJStream (socket)");
-            LOGGER.severe(e.getMessage());
-        } finally {
-            close();
-        }
-    }
-
-    private void getFile() throws IOException {
-        aqljStream.receiveUnsignedInt32();
-        int type = aqljStream.receiveChar();
-        if ((char) type != AQLJProtocol.GET_RESULTS_MESSAGE) {
-            return;
-        }
-
-        String path = aqljStream.receiveString();
-        File outputFile = new File(path);
-        FileInputStream fis = null;
-        try {
-            fis = new FileInputStream(outputFile);
-        } catch (FileNotFoundException e) {
-            LOGGER.warning("Error: requested file not found: " + path);
-            return;
-        }
-
-        byte[] buf = new byte[RESULT_BUFFER_SIZE];
-        long maxPayload = 0xffffffffL - 5; // 2^32 (max size of payload) - 5
-                                           // (header size)
-        long remainingTotal = outputFile.length();
-        long remainingInner = 0;
-        int sentTotal = 0;
-        int sentInner = 0;
-        int toSend = 0;
-
-        // the results may be large, so cram as much into a packet as possible
-        while (remainingTotal > maxPayload) {
-            aqljStream.sendUnsignedInt32(4 + 1 + maxPayload);
-            aqljStream.sendChar(AQLJProtocol.DATA_MESSAGE);
-            sentInner = 0;
-            remainingInner = 0;
-            while (sentInner < maxPayload) {
-                remainingInner = maxPayload - sentInner;
-                toSend = fis.read(buf, 0, (remainingInner > buf.length) ? buf.length : (int) remainingInner);
-                sentInner += toSend;
-                aqljStream.send(buf, 0, toSend);
-            }
-            aqljStream.flush();
-            sentTotal += maxPayload;
-            remainingTotal -= sentTotal;
-        }
-
-        // send the remaining data
-        if (remainingTotal > 0) {
-            aqljStream.sendUnsignedInt32(4 + 1 + (int) remainingTotal);
-            aqljStream.sendChar(AQLJProtocol.DATA_MESSAGE);
-            sentInner = 0;
-            remainingInner = 0;
-            while (sentInner < remainingTotal) {
-                remainingInner = remainingTotal - sentInner;
-                toSend = fis.read(buf, 0, (remainingInner > buf.length) ? buf.length : (int) remainingInner);
-                sentInner += toSend;
-                aqljStream.send(buf, 0, toSend);
-            }
-            aqljStream.flush();
-        }
-        outputFile.delete();
-        aqljStream.sendUnsignedInt32(5);
-        aqljStream.sendChar(AQLJProtocol.EXECUTE_COMPLETE_MESSAGE);
-        aqljStream.flush();
-    }
-
-    private void close() {
-        try {
-            aqljStream.close();
-        } catch (IOException e) {
-            LOGGER.severe("Error closing AQLJStream");
-            LOGGER.severe(e.getMessage());
-        }
-    }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/NodeDataClientThreadFactory.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/NodeDataClientThreadFactory.java
deleted file mode 100644
index 22efa89..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/NodeDataClientThreadFactory.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.server;
-
-import java.io.IOException;
-import java.net.Socket;
-
-/**
- * This class is a factory for client handler threads of type {@link NodeDataClientThread} and is used in conjunction with {@link ThreadedServer}.
- * 
- * @author zheilbron
- */
-public class NodeDataClientThreadFactory implements IClientThreadFactory {
-    @Override
-    public Thread createThread(Socket socket) throws IOException {
-        return new NodeDataClientThread(socket);
-    }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/ThreadedServer.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/ThreadedServer.java
deleted file mode 100644
index 573d6a3..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/ThreadedServer.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.server;
-
-import java.io.IOException;
-import java.net.ServerSocket;
-import java.net.Socket;
-import java.net.SocketException;
-import java.util.logging.Logger;
-
-/**
- * This server is a multithreaded server that spawns one connection per client
- * up to MAX_CLIENTS total clients. The type of thread spawned to handle each
- * client request is delegated to a client thread factory that implements the
- * IClientThreadFactory interface.
- * NOTE: The "BE" in logging messages stands for "back-end". This is to
- * differentiate from the "FE" or "front-end" when reviewing log messages.
- * 
- * @author zheilbron
- */
-public class ThreadedServer extends Thread {
-    private static Logger LOGGER = Logger.getLogger(ThreadedServer.class.getName());
-
-    private static final int MAX_CLIENTS = 10;
-
-    private final int port;
-    private final IClientThreadFactory factory;
-
-    private ServerSocket serverSocket;
-    private Socket clientSocket;
-    private Socket[] clientSockets;
-    private Thread[] threads;
-
-    public ThreadedServer(int port, IClientThreadFactory factory) {
-        this.port = port;
-        this.factory = factory;
-        this.clientSockets = new Socket[MAX_CLIENTS];
-        this.threads = new Thread[MAX_CLIENTS];
-        this.clientSocket = null;
-    }
-
-    public void run() {
-        try {
-            serverSocket = new ServerSocket(port);
-        } catch (IOException e) {
-            LOGGER.severe("Error listening on port: " + port);
-            LOGGER.severe(e.getMessage());
-            return;
-        }
-        LOGGER.info("Server started. Listening on port: " + port);
-
-        while (true) {
-            try {
-                clientSocket = serverSocket.accept();
-            } catch (SocketException e) {
-                // This is the normal path the server will take when exiting.
-                //
-                // In order to close the server down properly, the
-                // serverSocket.accept() call must
-                // be interrupted. The only way to interrupt the
-                // serverSocket.accept() call in the loop
-                // above is by calling serverSocket.close() (as is done in the
-                // ThreadedServer.shutdown() method
-                // below). The serverSocket.accept() then throws a
-                // SocketException, so we catch it here
-                // and assume that ThreadedServer.shutdown() was called.
-
-                return;
-            } catch (IOException e) {
-                LOGGER.severe("Failed to accept() connection");
-                LOGGER.severe(e.getMessage());
-            }
-
-            for (int i = 0; i < threads.length; i++) {
-                if (threads[i] == null || !threads[i].isAlive()) {
-                    try {
-                        threads[i] = factory.createThread(clientSocket);
-                    } catch (IOException e) {
-                        LOGGER.severe("Failed to create client handler thread");
-                        LOGGER.severe(e.getMessage());
-                    }
-                    clientSockets[i] = clientSocket;
-                    threads[i].start();
-                    clientSocket = null;
-                    break;
-                }
-            }
-
-            // setting the clientSocket to null is an indicator the there was
-            // room for the
-            // connection (i.e. the number of clients < MAX_CLIENTS). If it is
-            // not set, then
-            // there was no room for the connection, so the client is dropped.
-            if (clientSocket != null) {
-                try {
-                    clientSocket.close();
-                } catch (IOException e) {
-                    LOGGER.severe("Error closing (dropped) client socket.");
-                    LOGGER.severe(e.getMessage());
-                }
-                LOGGER.warning("Client was dropped. Maximum number of connections reached!");
-            }
-        }
-    }
-
-    public void shutdown() {
-        try {
-            serverSocket.close();
-        } catch (IOException e) {
-            LOGGER.severe("Error closing server socket.");
-            LOGGER.severe(e.getMessage());
-        }
-
-        try {
-            for (int i = 0; i < threads.length; i++) {
-                if (threads[i] != null && threads[i].isAlive()) {
-                    clientSockets[i].close();
-                    threads[i].interrupt();
-                }
-            }
-        } catch (IOException e) {
-            LOGGER.severe("Error closing client socket.");
-            LOGGER.severe(e.getMessage());
-        }
-    }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
index e97275b..ccc72c1 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
@@ -280,7 +280,8 @@
                 MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
                 throw new AlgebricksException(e);
             }
-            // Following jobs are run under a separate transaction, that is committed/aborted by the JobEventListener
+            // Following jobs are run under a separate transaction, that is
+            // committed/aborted by the JobEventListener
             for (JobSpecification jobspec : jobsToExecute) {
                 JobId jobId = runJob(hcc, jobspec);
                 if (stmt.getKind() == Kind.QUERY) {
@@ -419,7 +420,7 @@
             }
         }
         MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), new Dataset(dataverseName,
-                datasetName, itemTypeName, datasetDetails, dsType));
+                datasetName, itemTypeName, datasetDetails, dd.getHints(), dsType));
         if (dd.getDatasetType() == DatasetType.INTERNAL || dd.getDatasetType() == DatasetType.FEED) {
             Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(),
                     dataverseName);
@@ -740,7 +741,8 @@
         Pair<Query, Integer> reWrittenQuery = APIFramework.reWriteQuery(declaredFunctions, metadataProvider, query,
                 sessionConfig, out, pdf);
 
-        // Query Compilation (happens under the same ongoing metadata transaction)
+        // Query Compilation (happens under the same ongoing metadata
+        // transaction)
         if (metadataProvider.isWriteTransaction()) {
             metadataProvider.setJobTxnId(TransactionIDFactory.generateTransactionId());
         }
@@ -760,9 +762,9 @@
         CompiledBeginFeedStatement cbfs = new CompiledBeginFeedStatement(dataverseName,
                 bfs.getDatasetName().getValue(), bfs.getQuery(), bfs.getVarCounter());
 
-        Dataset dataset = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName, bfs
-                .getDatasetName().getValue());
-        if(dataset == null) {
+        Dataset dataset = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
+                bfs.getDatasetName().getValue());
+        if (dataset == null) {
             throw new AsterixException("Unknown dataset :" + bfs.getDatasetName().getValue());
         }
         IDatasetDetails datasetDetails = dataset.getDatasetDetails();
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
index 8d804a4..5d3ab0a 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -12,9 +12,11 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package edu.uci.ics.asterix.file;
 
 import java.io.File;
+import java.io.IOException;
 import java.rmi.RemoteException;
 import java.util.List;
 import java.util.logging.Logger;
@@ -329,7 +331,12 @@
         ISerializerDeserializer[] recordFields = new ISerializerDeserializer[1 + numKeys];
         recordFields[0] = payloadSerde;
         for (int i = 0; i < numKeys; i++) {
-            IAType keyType = itemType.getFieldType(partitioningKeys.get(i));
+            IAType keyType;
+            try {
+                keyType = itemType.getFieldType(partitioningKeys.get(i));
+            } catch (IOException e) {
+                throw new AlgebricksException(e);
+            }
             ISerializerDeserializer keySerde = dataFormat.getSerdeProvider().getSerializerDeserializer(keyType);
             recordFields[i + 1] = keySerde;
         }
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexCreator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexCreator.java
index 34ba208..87d3b08 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexCreator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexCreator.java
@@ -1,6 +1,22 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.asterix.file;
 
 import java.io.DataOutput;
+import java.io.IOException;
 import java.util.List;
 
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
@@ -137,13 +153,12 @@
         numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
         numSecondaryKeys = createIndexStmt.getKeyFields().size();
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> primarySplitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(
-                        dataverseName, datasetName, datasetName);
+                .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(dataverseName, datasetName, datasetName);
         primaryFileSplitProvider = primarySplitsAndConstraint.first;
         primaryPartitionConstraint = primarySplitsAndConstraint.second;
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(
-                        dataverseName, datasetName, secondaryIndexName);
+                .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(dataverseName, datasetName,
+                        secondaryIndexName);
         secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
         secondaryPartitionConstraint = secondarySplitsAndConstraint.second;
         // Must be called in this order.
@@ -159,7 +174,12 @@
         primaryComparatorFactories = new IBinaryComparatorFactory[numPrimaryKeys];
         ISerializerDeserializerProvider serdeProvider = metadataProvider.getFormat().getSerdeProvider();
         for (int i = 0; i < numPrimaryKeys; i++) {
-            IAType keyType = itemType.getFieldType(partitioningKeys.get(i));
+            IAType keyType;
+            try {
+                keyType = itemType.getFieldType(partitioningKeys.get(i));
+            } catch (IOException e) {
+                throw new AlgebricksException(e);
+            }
             primaryRecFields[i] = serdeProvider.getSerializerDeserializer(keyType);
             primaryComparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(
                     keyType, true);
@@ -286,8 +306,8 @@
             fieldPermutation[i] = i;
         }
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(
-                        dataverseName, datasetName, secondaryIndexName);
+                .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(dataverseName, datasetName,
+                        secondaryIndexName);
         TreeIndexBulkLoadOperatorDescriptor treeIndexBulkLoadOp = new TreeIndexBulkLoadOperatorDescriptor(spec,
                 AsterixStorageManagerInterface.INSTANCE, AsterixIndexRegistryProvider.INSTANCE,
                 secondarySplitsAndConstraint.first, secondaryRecDesc.getTypeTraits(), secondaryComparatorFactories,
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCBootstrapImpl.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCBootstrapImpl.java
index 54e26dd..0c25e34 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCBootstrapImpl.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCBootstrapImpl.java
@@ -14,11 +14,6 @@
  */
 package edu.uci.ics.asterix.hyracks.bootstrap;
 
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
@@ -26,8 +21,6 @@
 import org.eclipse.jetty.servlet.ServletContextHandler;
 import org.eclipse.jetty.servlet.ServletHolder;
 
-import edu.uci.ics.asterix.api.aqlj.server.APIClientThreadFactory;
-import edu.uci.ics.asterix.api.aqlj.server.ThreadedServer;
 import edu.uci.ics.asterix.api.http.servlet.APIServlet;
 import edu.uci.ics.asterix.common.api.AsterixAppContextInfoImpl;
 import edu.uci.ics.asterix.api.http.servlet.DDLAPIServlet;
@@ -44,8 +37,8 @@
 import edu.uci.ics.hyracks.api.application.ICCBootstrap;
 
 /**
- * The bootstrap class of the application that will manage its
- * life cycle at the Cluster Controller.
+ * The bootstrap class of the application that will manage its life cycle at the
+ * Cluster Controller.
  */
 public class CCBootstrapImpl implements ICCBootstrap {
     private static final Logger LOGGER = Logger.getLogger(CCBootstrapImpl.class.getName());
@@ -54,14 +47,10 @@
 
     private static final int DEFAULT_JSON_API_SERVER_PORT = 19101;
 
-    public static final int DEFAULT_API_SERVER_PORT = 14600;
-    private static final int DEFAULT_API_NODEDATA_SERVER_PORT = 14601;
-
     private Server webServer;
     private Server jsonAPIServer;
     private static IAsterixStateProxy proxy;
     private ICCApplicationContext appCtx;
-    private ThreadedServer apiServer;
 
     @Override
     public void start() throws Exception {
@@ -69,15 +58,12 @@
             LOGGER.info("Starting Asterix cluster controller");
         }
 
-        // Set the AsterixStateProxy to be the distributed object
         proxy = AsterixStateProxy.registerRemoteObject();
         proxy.setAsterixProperties(AsterixProperties.INSTANCE);
         appCtx.setDistributedState(proxy);
 
-        // Create the metadata manager
         MetadataManager.INSTANCE = new MetadataManager(proxy);
 
-        // Setup and start the web interface
         setupWebServer();
         webServer.start();
 
@@ -85,11 +71,6 @@
         setupJSONAPIServer();
         jsonAPIServer.start();
 
-        // Setup and start the API server
-        setupAPIServer();
-        apiServer.start();
-
-        //Initialize AsterixAppContext
         AsterixAppContextInfoImpl.initialize(appCtx);
     }
 
@@ -101,7 +82,6 @@
         AsterixStateProxy.unregisterRemoteObject();
 
         webServer.stop();
-        apiServer.shutdown();
     }
 
     @Override
@@ -140,31 +120,4 @@
         context.addServlet(new ServletHolder(new UpdateAPIServlet()), "/update");
         context.addServlet(new ServletHolder(new DDLAPIServlet()), "/ddl");
     }
-
-    private void setupAPIServer() throws Exception {
-        // set the APINodeDataServer ports
-        int startPort = DEFAULT_API_NODEDATA_SERVER_PORT;
-        Map<String, Set<String>> nodeNameMap = new HashMap<String, Set<String>>();
-        getIPAddressNodeMap(nodeNameMap);
-
-        for (Map.Entry<String, Set<String>> entry : nodeNameMap.entrySet()) {
-            Set<String> nodeNames = entry.getValue();
-            Iterator<String> it = nodeNames.iterator();
-            while (it.hasNext()) {
-                AsterixNodeState ns = new AsterixNodeState();
-                ns.setAPINodeDataServerPort(startPort++);
-                proxy.setAsterixNodeState(it.next(), ns);
-            }
-        }
-        apiServer = new ThreadedServer(DEFAULT_API_SERVER_PORT, new APIClientThreadFactory(appCtx));
-    }
-
-    private void getIPAddressNodeMap(Map<String, Set<String>> nodeNameMap) throws IOException {
-        nodeNameMap.clear();
-        try {
-            appCtx.getCCContext().getIPAddressNodeMap(nodeNameMap);
-        } catch (Exception e) {
-            throw new IOException("Unable to obtain IP address node map", e);
-        }
-    }
-}
\ No newline at end of file
+}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCBootstrapImpl.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCBootstrapImpl.java
index b1e7481..185c608 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCBootstrapImpl.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCBootstrapImpl.java
@@ -19,8 +19,6 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import edu.uci.ics.asterix.api.aqlj.server.NodeDataClientThreadFactory;
-import edu.uci.ics.asterix.api.aqlj.server.ThreadedServer;
 import edu.uci.ics.asterix.common.context.AsterixAppRuntimeContext;
 import edu.uci.ics.asterix.metadata.MetadataManager;
 import edu.uci.ics.asterix.metadata.MetadataNode;
@@ -37,7 +35,6 @@
     private AsterixAppRuntimeContext runtimeContext;
     private String nodeId;
     private boolean isMetadataNode = false;
-    private ThreadedServer apiNodeDataServer;
 
     @Override
     public void start() throws Exception {
@@ -48,10 +45,8 @@
 
         runtimeContext = new AsterixAppRuntimeContext(ncApplicationContext);
         runtimeContext.initialize();
-
         ncApplicationContext.setApplicationObject(runtimeContext);
 
-        // Initialize metadata if this node is the metadata node
         IAsterixStateProxy proxy = (IAsterixStateProxy) ncApplicationContext.getDistributedState();
         isMetadataNode = nodeId.equals(proxy.getAsterixProperties().getMetadataNodeName());
         if (isMetadataNode) {
@@ -60,20 +55,11 @@
             if (LOGGER.isLoggable(Level.INFO)) {
                 LOGGER.info("Bootstrapping metadata");
             }
-
             MetadataManager.INSTANCE = new MetadataManager(proxy);
             MetadataManager.INSTANCE.init();
             MetadataBootstrap.startUniverse(proxy.getAsterixProperties(), ncApplicationContext);
-
         }
 
-        // Start a sub-component for the API server. This server is only connected to by the 
-        // API server that lives on the CC and never by a client wishing to execute AQL.
-        // TODO: The API sub-system will change dramatically in the future and this code will go away, 
-        // but leave it for now.
-        AsterixNodeState ns = (AsterixNodeState) proxy.getAsterixNodeState(nodeId);
-        apiNodeDataServer = new ThreadedServer(ns.getAPINodeDataServerPort(), new NodeDataClientThreadFactory());
-        apiNodeDataServer.start();
     }
 
     public void registerRemoteMetadataNode(IAsterixStateProxy proxy) throws RemoteException {
@@ -93,12 +79,9 @@
             LOGGER.info("Stopping Asterix node controller: " + nodeId);
         }
 
-        // Quiesce metadata
         if (isMetadataNode) {
             MetadataBootstrap.stopUniverse();
         }
-
-        apiNodeDataServer.shutdown();
         runtimeContext.deinitialize();
     }
 
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java
index 4dfe106..0931994 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java
@@ -5,27 +5,12 @@
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.PrintWriter;
 import java.io.Reader;
-import java.io.UnsupportedEncodingException;
-import java.nio.MappedByteBuffer;
-import java.nio.channels.FileChannel;
-import java.nio.charset.Charset;
 
-import edu.uci.ics.asterix.api.aqlj.client.AQLJClientDriver;
-import edu.uci.ics.asterix.api.aqlj.client.IADMCursor;
-import edu.uci.ics.asterix.api.aqlj.client.IAQLJConnection;
-import edu.uci.ics.asterix.api.aqlj.client.IAQLJResult;
-import edu.uci.ics.asterix.api.aqlj.common.AQLJException;
 import edu.uci.ics.asterix.api.java.AsterixJavaClient;
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.om.base.IAObject;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.AbstractCollectionType;
-import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
 
 public class TestsUtils {
@@ -98,112 +83,6 @@
 
     }
 
-    public static void runScriptAndCompareWithResultViaClientAPI(File scriptFile, PrintWriter print, File expectedFile,
-            File actualFile, int apiPort) throws Exception {
-        FileOutputStream fos = new FileOutputStream(actualFile);
-        String query = queryFromFile(scriptFile);
-        IAQLJConnection conn = null;
-        IAQLJResult res = null;
-        try {
-            conn = AQLJClientDriver.getConnection("localhost", apiPort, "Metadata");
-            res = conn.execute(query);
-
-            while (res.next()) {
-                leafPrint(conn, res, fos);
-            }
-        } catch (AQLJException e) {
-            e.printStackTrace();
-        } finally {
-            // be sure that we close the connection and the result cursor
-            if (res != null) {
-                res.close();
-            }
-            if (conn != null) {
-                conn.close();
-            }
-        }
-        fos.close();
-
-        BufferedReader readerExpected = new BufferedReader(new InputStreamReader(new FileInputStream(expectedFile),
-                "UTF-8"));
-        BufferedReader readerActual = new BufferedReader(
-                new InputStreamReader(new FileInputStream(actualFile), "UTF-8"));
-        String lineExpected, lineActual;
-        int num = 1;
-        try {
-            while ((lineExpected = readerExpected.readLine()) != null) {
-                lineActual = readerActual.readLine();
-                // Assert.assertEquals(lineExpected, lineActual);
-                if (lineActual == null) {
-                    throw new Exception("Result for " + scriptFile + " changed at line " + num + ":\n< " + lineExpected
-                            + "\n> ");
-                }
-                if (!equalStrings(lineExpected, lineActual)) {
-                    throw new Exception("Result for " + scriptFile + " changed at line " + num + ":\n< " + lineExpected
-                            + "\n> " + lineActual);
-                }
-                ++num;
-            }
-            lineActual = readerActual.readLine();
-            // Assert.assertEquals(null, lineActual);
-            if (lineActual != null) {
-                throw new Exception("Result for " + scriptFile + " changed at line " + num + ":\n< \n> " + lineActual);
-            }
-            actualFile.delete();
-        } finally {
-            readerExpected.close();
-            readerActual.close();
-        }
-
-    }
-
-    public static void leafPrint(IAQLJConnection conn, IADMCursor c, FileOutputStream fos) throws AQLJException,
-            UnsupportedEncodingException, IOException {
-        IAType t;
-        IAObject o;
-        String fieldNames[];
-        IADMCursor cur;
-
-        o = c.get();
-        if (o == null) {
-            return;
-        }
-
-        t = o.getType();
-        if (t instanceof AbstractCollectionType) {
-            fos.write("AbstractCollectionType: \n".getBytes("UTF-8"));
-            cur = conn.createADMCursor();
-            c.position(cur);
-            while (cur.next()) {
-
-                leafPrint(conn, cur, fos);
-            }
-        } else if (t instanceof ARecordType) {
-            fos.write("ARecordType: \n".getBytes("UTF-8"));
-            fieldNames = ((ARecordType) t).getFieldNames();
-            for (int i = 0; i < fieldNames.length; i++) {
-                cur = conn.createADMCursor();
-                c.position(cur, fieldNames[i]);
-                fos.write(("field: " + fieldNames[i] + "\n").getBytes("UTF-8"));
-                leafPrint(conn, cur, fos);
-            }
-        } else {
-            fos.write((o.toString() + "\n").getBytes("UTF-8"));
-        }
-    }
-
-    private static String queryFromFile(File f) throws IOException {
-        FileInputStream fis = new FileInputStream(f);
-        try {
-            FileChannel fc = fis.getChannel();
-            MappedByteBuffer bb = fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size());
-            return Charset.forName("UTF-8").decode(bb).toString();
-        } finally {
-            fis.close();
-        }
-
-    }
-
     private static boolean equalStrings(String s1, String s2) {
         String[] rowsOne = s1.split("\n");
         String[] rowsTwo = s2.split("\n");
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/aqlj/ClientAPITest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/aqlj/ClientAPITest.java
deleted file mode 100644
index 0301675..0000000
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/aqlj/ClientAPITest.java
+++ /dev/null
@@ -1,151 +0,0 @@
-package edu.uci.ics.asterix.test.aqlj;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.PrintWriter;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.logging.Logger;
-
-import org.junit.AfterClass;
-import org.junit.Assume;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.internal.AssumptionViolatedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
-
-import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
-import edu.uci.ics.asterix.hyracks.bootstrap.CCBootstrapImpl;
-import edu.uci.ics.asterix.test.aql.TestsUtils;
-import edu.uci.ics.asterix.test.common.TestHelper;
-
-@RunWith(Parameterized.class)
-public class ClientAPITest {
-    private static final PrintWriter ERR = new PrintWriter(System.err);
-    private static final String EXTENSION_QUERY = "aql";
-    private static final String FILENAME_IGNORE = "ignore.txt";
-    private static final String FILENAME_ONLY = "only.txt";
-    private static final ArrayList<String> ignore = readFile(FILENAME_IGNORE);
-    private static final ArrayList<String> only = readFile(FILENAME_ONLY);
-    private static final String PATH_ACTUAL = "aqljtest/";
-    private static final String PATH_BASE = "src/test/resources/aqljts/";
-    private static final String PATH_EXPECTED = PATH_BASE + "results/";
-    private static final String PATH_QUERIES = PATH_BASE + "queries/";
-    private static final String SEPARATOR = System.getProperty("file.separator");
-
-    private static final String TEST_CONFIG_FILE_NAME = "test.properties";
-    private static final String[] ASTERIX_DATA_DIRS = new String[] { "nc1data", "nc2data" };
-
-    private static final Logger LOGGER = Logger.getLogger(ClientAPITest.class.getName());
-
-    private static ArrayList<String> readFile(String fileName) {
-        ArrayList<String> list = new ArrayList<String>();
-        BufferedReader result;
-        try {
-            result = new BufferedReader(new InputStreamReader(new FileInputStream(PATH_BASE + fileName), "UTF-8"));
-            while (true) {
-                String line = result.readLine();
-                if (line == null) {
-                    break;
-                } else {
-                    String s = line.trim();
-                    if (s.length() > 0) {
-                        list.add(s);
-                    }
-                }
-            }
-            result.close();
-        } catch (FileNotFoundException e) {
-        } catch (IOException e) {
-        }
-        return list;
-    }
-
-    @BeforeClass
-    public static void setUp() throws Exception {
-        System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, TEST_CONFIG_FILE_NAME);
-        System.setProperty(GlobalConfig.WEB_SERVER_PORT_PROPERTY, "19002");
-        File outdir = new File(PATH_ACTUAL);
-        outdir.mkdirs();
-        AsterixHyracksIntegrationUtil.init();
-        // _bootstrap.start();
-    }
-
-    @AfterClass
-    public static void tearDown() throws Exception {
-        // _bootstrap.stop();
-        AsterixHyracksIntegrationUtil.deinit();
-        File outdir = new File(PATH_ACTUAL);
-        File[] files = outdir.listFiles();
-        if (files == null || files.length == 0) {
-            outdir.delete();
-        }
-        // clean up the files written by the ASTERIX storage manager
-        for (String d : ASTERIX_DATA_DIRS) {
-            TestsUtils.deleteRec(new File(d));
-        }
-    }
-
-    private static void suiteBuild(File dir, Collection<Object[]> testArgs, String path) {
-        for (File file : dir.listFiles()) {
-            if (file.isDirectory() && !file.getName().startsWith(".")) {
-                suiteBuild(file, testArgs, path + file.getName() + SEPARATOR);
-            }
-            if (file.isFile() && file.getName().endsWith(EXTENSION_QUERY)
-            // && !ignore.contains(path + file.getName())
-            ) {
-                String resultFileName = TestsUtils.aqlExtToResExt(file.getName());
-                File expectedFile = new File(PATH_EXPECTED + path + resultFileName);
-                File actualFile = new File(PATH_ACTUAL + SEPARATOR + path.replace(SEPARATOR, "_") + resultFileName);
-                testArgs.add(new Object[] { file, expectedFile, actualFile });
-            }
-        }
-    }
-
-    @Parameters
-    public static Collection<Object[]> tests() {
-        Collection<Object[]> testArgs = new ArrayList<Object[]>();
-        suiteBuild(new File(PATH_QUERIES), testArgs, "");
-        return testArgs;
-    }
-
-    private File actualFile;
-    private File expectedFile;
-    private File queryFile;
-
-    public ClientAPITest(File queryFile, File expectedFile, File actualFile) {
-        this.queryFile = queryFile;
-        this.expectedFile = expectedFile;
-        this.actualFile = actualFile;
-    }
-
-    @Test
-    public void test() throws Exception {
-        try {
-            String queryFileShort = queryFile.getPath().substring(PATH_QUERIES.length())
-                    .replace(SEPARATOR.charAt(0), '/');
-
-            if (!only.isEmpty()) {
-                Assume.assumeTrue(TestHelper.isInPrefixList(only, queryFileShort));
-            }
-            Assume.assumeTrue(!TestHelper.isInPrefixList(ignore, queryFileShort));
-            LOGGER.severe("RUNNING TEST: " + queryFile + " \n");
-            TestsUtils.runScriptAndCompareWithResultViaClientAPI(queryFile, ERR, expectedFile, actualFile,
-                    CCBootstrapImpl.DEFAULT_API_SERVER_PORT);
-        } catch (Exception e) {
-            if (!(e instanceof AssumptionViolatedException)) {
-                LOGGER.severe("Test \"" + queryFile.getPath() + "\" FAILED!");
-                throw new Exception("Test \"" + queryFile.getPath() + "\" FAILED!", e);
-            } else {
-                throw e;
-            }
-        }
-    }
-}
diff --git a/asterix-app/src/test/resources/AQLTS/queries/del-dataset.aql b/asterix-app/src/test/resources/AQLTS/queries/del-dataset.aql
index 51e4344..1660e44 100644
--- a/asterix-app/src/test/resources/AQLTS/queries/del-dataset.aql
+++ b/asterix-app/src/test/resources/AQLTS/queries/del-dataset.aql
@@ -24,6 +24,6 @@
 create nodegroup group1  if not exists on nc1, nc2;
 
 create dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 drop dataset Customers;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/AQLTS/queries/load-del-dataset.aql b/asterix-app/src/test/resources/AQLTS/queries/load-del-dataset.aql
index f70de14..576a9da 100644
--- a/asterix-app/src/test/resources/AQLTS/queries/load-del-dataset.aql
+++ b/asterix-app/src/test/resources/AQLTS/queries/load-del-dataset.aql
@@ -24,7 +24,7 @@
 create nodegroup group1  if not exists on nc1, nc2;
 
 create dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/aqljts/ignore.txt b/asterix-app/src/test/resources/aqljts/ignore.txt
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/aqljts/ignore.txt
+++ /dev/null
diff --git a/asterix-app/src/test/resources/aqljts/only.txt b/asterix-app/src/test/resources/aqljts/only.txt
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/aqljts/only.txt
+++ /dev/null
diff --git a/asterix-app/src/test/resources/aqljts/queries/gram-tokens_01.aql b/asterix-app/src/test/resources/aqljts/queries/gram-tokens_01.aql
deleted file mode 100644
index 5f2b961..0000000
--- a/asterix-app/src/test/resources/aqljts/queries/gram-tokens_01.aql
+++ /dev/null
@@ -1,4 +0,0 @@
-let $txt := "Jürgen S. Generic's Car"
-let $tokens := gram-tokens($txt, 3, false)
-for $token in $tokens
-return $token
diff --git a/asterix-app/src/test/resources/aqljts/queries/nested_01.aql b/asterix-app/src/test/resources/aqljts/queries/nested_01.aql
deleted file mode 100644
index 968f314..0000000
--- a/asterix-app/src/test/resources/aqljts/queries/nested_01.aql
+++ /dev/null
@@ -1,9 +0,0 @@
-for $x in [1]
-return {"x": 
-			[{"r": [["a", "b", "c"], ["d", "e", "f"]]}],
-		"y":
-			[{"s": [[1, 2, 3], [4, 5, 6]]}],
-		"z":
-			[{"t": [datetime("2011-09-16T11:00:00Z")]}]
-		}
-		
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/aqljts/results/gram-tokens_01.adm b/asterix-app/src/test/resources/aqljts/results/gram-tokens_01.adm
deleted file mode 100644
index f0f59c3..0000000
--- a/asterix-app/src/test/resources/aqljts/results/gram-tokens_01.adm
+++ /dev/null
@@ -1,21 +0,0 @@
-AString: {jür}
-AString: {ürg}
-AString: {rge}
-AString: {gen}
-AString: {en }
-AString: {n s}
-AString: { s.}
-AString: {s. }
-AString: {. g}
-AString: { ge}
-AString: {gen}
-AString: {ene}
-AString: {ner}
-AString: {eri}
-AString: {ric}
-AString: {ic'}
-AString: {c's}
-AString: {'s }
-AString: {s c}
-AString: { ca}
-AString: {car}
diff --git a/asterix-app/src/test/resources/aqljts/results/nested_01.adm b/asterix-app/src/test/resources/aqljts/results/nested_01.adm
deleted file mode 100644
index 27de5c0..0000000
--- a/asterix-app/src/test/resources/aqljts/results/nested_01.adm
+++ /dev/null
@@ -1,33 +0,0 @@
-ARecordType: 
-field: x
-AbstractCollectionType: 
-ARecordType: 
-field: r
-AbstractCollectionType: 
-AbstractCollectionType: 
-AString: {a}
-AString: {b}
-AString: {c}
-AbstractCollectionType: 
-AString: {d}
-AString: {e}
-AString: {f}
-field: y
-AbstractCollectionType: 
-ARecordType: 
-field: s
-AbstractCollectionType: 
-AbstractCollectionType: 
-AInt32: {1}
-AInt32: {2}
-AInt32: {3}
-AbstractCollectionType: 
-AInt32: {4}
-AInt32: {5}
-AInt32: {6}
-field: z
-AbstractCollectionType: 
-ARecordType: 
-field: t
-AbstractCollectionType: 
-ADateTime: { 2011-09-16T11:00:00.000Z }
diff --git a/asterix-app/src/test/resources/dapd/denorm_user_event.aql b/asterix-app/src/test/resources/dapd/denorm_user_event.aql
index a3b9fab..fe79872 100644
--- a/asterix-app/src/test/resources/dapd/denorm_user_event.aql
+++ b/asterix-app/src/test/resources/dapd/denorm_user_event.aql
@@ -27,8 +27,8 @@
 
 declare nodegroup group1 on nc1, nc2;
 
-declare dataset Users(UserType) partitioned by key u_name on group1;
-declare dataset Events(EventType) partitioned by key e_name on group1;
+declare dataset Users(UserType) primary key u_name on group1;
+declare dataset Events(EventType) primary key e_name on group1;
 
 write output to nc1:"/tmp/denorm_user_event.adm";
 
diff --git a/asterix-app/src/test/resources/dapd/q1.aql b/asterix-app/src/test/resources/dapd/q1.aql
index ae6b28f..934387b 100644
--- a/asterix-app/src/test/resources/dapd/q1.aql
+++ b/asterix-app/src/test/resources/dapd/q1.aql
@@ -23,7 +23,7 @@
 
 declare nodegroup group1 on nc1, nc2;
 
-declare dataset User(UserType) partitioned by key name on group1;
+declare dataset User(UserType) primary key name on group1;
 
 write output to nc1:"/tmp/q1.adm";
 
diff --git a/asterix-app/src/test/resources/dapd/q2.aql b/asterix-app/src/test/resources/dapd/q2.aql
index 3998b9e..fdb0469 100644
--- a/asterix-app/src/test/resources/dapd/q2.aql
+++ b/asterix-app/src/test/resources/dapd/q2.aql
@@ -37,7 +37,7 @@
 
 drop dataset Event;
 declare dataset Event(EventType) 
-  partitioned by key event_id on group1;
+  primary key event_id on group1;
 
 load dataset Event 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo0216/01-load-dblp-large.aql b/asterix-app/src/test/resources/demo0216/01-load-dblp-large.aql
index 0cb6b21..171c935 100644
--- a/asterix-app/src/test/resources/demo0216/01-load-dblp-large.aql
+++ b/asterix-app/src/test/resources/demo0216/01-load-dblp-large.aql
@@ -12,7 +12,7 @@
 rainbow-04, rainbow-05;
 
 declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo0216/03-load-dblp-small.aql b/asterix-app/src/test/resources/demo0216/03-load-dblp-small.aql
index f4d8ae0..751179f 100644
--- a/asterix-app/src/test/resources/demo0216/03-load-dblp-small.aql
+++ b/asterix-app/src/test/resources/demo0216/03-load-dblp-small.aql
@@ -12,7 +12,7 @@
 rainbow-04, rainbow-05;
 
 declare dataset DBLPSmall(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
 
 load dataset DBLPSmall 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo0216/110-self-join-dblp.aql b/asterix-app/src/test/resources/demo0216/110-self-join-dblp.aql
index 9077f33..dc40196 100644
--- a/asterix-app/src/test/resources/demo0216/110-self-join-dblp.aql
+++ b/asterix-app/src/test/resources/demo0216/110-self-join-dblp.aql
@@ -12,7 +12,7 @@
 rainbow-04, rainbow-05;
 
 declare dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 write output to rainbow-01:"/home/hyracks/dblp-self-join.adm";
 
diff --git a/asterix-app/src/test/resources/demo0216/120-self-join-dblp-small.aql b/asterix-app/src/test/resources/demo0216/120-self-join-dblp-small.aql
index 66ca6cb..5802236 100644
--- a/asterix-app/src/test/resources/demo0216/120-self-join-dblp-small.aql
+++ b/asterix-app/src/test/resources/demo0216/120-self-join-dblp-small.aql
@@ -12,7 +12,7 @@
 rainbow-04, rainbow-05;
 
 declare dataset DBLPSmall(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 write output to rainbow-01:"/home/hyracks/small-dblp-self-join.adm";
 
diff --git a/asterix-app/src/test/resources/demo0927/local/create-index.aql b/asterix-app/src/test/resources/demo0927/local/create-index.aql
index 3ecd163..dd41fc8 100644
--- a/asterix-app/src/test/resources/demo0927/local/create-index.aql
+++ b/asterix-app/src/test/resources/demo0927/local/create-index.aql
@@ -20,6 +20,6 @@
 declare nodegroup group1 on nc1,nc2;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 create index NameIndex on Customers(name);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/demo0927/local/dataset-filter-int.aql b/asterix-app/src/test/resources/demo0927/local/dataset-filter-int.aql
index 3acb8f8..c50405b 100644
--- a/asterix-app/src/test/resources/demo0927/local/dataset-filter-int.aql
+++ b/asterix-app/src/test/resources/demo0927/local/dataset-filter-int.aql
@@ -20,7 +20,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 write output to nc1:"/tmp/mycustomers.adm";
 
diff --git a/asterix-app/src/test/resources/demo0927/local/dataset-filter-str.aql b/asterix-app/src/test/resources/demo0927/local/dataset-filter-str.aql
index 35401cf..89242ea 100644
--- a/asterix-app/src/test/resources/demo0927/local/dataset-filter-str.aql
+++ b/asterix-app/src/test/resources/demo0927/local/dataset-filter-str.aql
@@ -20,7 +20,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 write output to nc1:"/tmp/mycustomers.adm";
 
diff --git a/asterix-app/src/test/resources/demo0927/local/dataset-scan.aql b/asterix-app/src/test/resources/demo0927/local/dataset-scan.aql
index 2f8746c..230b6f2 100644
--- a/asterix-app/src/test/resources/demo0927/local/dataset-scan.aql
+++ b/asterix-app/src/test/resources/demo0927/local/dataset-scan.aql
@@ -20,7 +20,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 write output to nc1:"/tmp/mycustomers.adm";
 
diff --git a/asterix-app/src/test/resources/demo0927/local/declare-index.aql b/asterix-app/src/test/resources/demo0927/local/declare-index.aql
index ecacc49..49c0a31 100644
--- a/asterix-app/src/test/resources/demo0927/local/declare-index.aql
+++ b/asterix-app/src/test/resources/demo0927/local/declare-index.aql
@@ -20,6 +20,6 @@
 declare nodegroup group1 on nc1,nc2;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 declare index NameIndex on Customers(name);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/demo0927/local/join-cust-ord.aql b/asterix-app/src/test/resources/demo0927/local/join-cust-ord.aql
index b262860..6630507 100644
--- a/asterix-app/src/test/resources/demo0927/local/join-cust-ord.aql
+++ b/asterix-app/src/test/resources/demo0927/local/join-cust-ord.aql
@@ -30,9 +30,9 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 write output to nc1:"/tmp/custorder.adm";
 
diff --git a/asterix-app/src/test/resources/demo0927/local/load-cust.aql b/asterix-app/src/test/resources/demo0927/local/load-cust.aql
index b71731c..885b226 100644
--- a/asterix-app/src/test/resources/demo0927/local/load-cust.aql
+++ b/asterix-app/src/test/resources/demo0927/local/load-cust.aql
@@ -20,7 +20,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo0927/local/load-ord.aql b/asterix-app/src/test/resources/demo0927/local/load-ord.aql
index 15ba4cf..6fba4b1 100644
--- a/asterix-app/src/test/resources/demo0927/local/load-ord.aql
+++ b/asterix-app/src/test/resources/demo0927/local/load-ord.aql
@@ -12,7 +12,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 load dataset Orders from nc1:"/tmp/orderData.json";
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo0927/rainbow/01-load-cust.aql b/asterix-app/src/test/resources/demo0927/rainbow/01-load-cust.aql
index 74502a0..ed750e5 100644
--- a/asterix-app/src/test/resources/demo0927/rainbow/01-load-cust.aql
+++ b/asterix-app/src/test/resources/demo0927/rainbow/01-load-cust.aql
@@ -20,7 +20,7 @@
 declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 load dataset Customers from rainbow-01:"/home/hyracks/demo-data/customerData.json";
 //  delete dataset Customers;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/demo0927/rainbow/02-filter-cust.aql b/asterix-app/src/test/resources/demo0927/rainbow/02-filter-cust.aql
index 2ed1304..d8070ef 100644
--- a/asterix-app/src/test/resources/demo0927/rainbow/02-filter-cust.aql
+++ b/asterix-app/src/test/resources/demo0927/rainbow/02-filter-cust.aql
@@ -20,7 +20,7 @@
 declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 write output to rainbow-01:"/home/hyracks/mycustomers.adm";
 
diff --git a/asterix-app/src/test/resources/demo0927/rainbow/03-load-ord.aql b/asterix-app/src/test/resources/demo0927/rainbow/03-load-ord.aql
index a9bfe61..0fb5398 100644
--- a/asterix-app/src/test/resources/demo0927/rainbow/03-load-ord.aql
+++ b/asterix-app/src/test/resources/demo0927/rainbow/03-load-ord.aql
@@ -12,7 +12,7 @@
 declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
 
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 load dataset Orders from rainbow-01:"/home/hyracks/demo-data/orderData.json";
 // delete dataset Orders;
diff --git a/asterix-app/src/test/resources/demo0927/rainbow/04-join-custord.aql b/asterix-app/src/test/resources/demo0927/rainbow/04-join-custord.aql
index 57cc397..d89a799 100644
--- a/asterix-app/src/test/resources/demo0927/rainbow/04-join-custord.aql
+++ b/asterix-app/src/test/resources/demo0927/rainbow/04-join-custord.aql
@@ -30,9 +30,9 @@
 declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 write output to rainbow-01:"/home/hyracks/custorder.adm";
 
diff --git a/asterix-app/src/test/resources/demo1112/local/01-load-cust.aql b/asterix-app/src/test/resources/demo1112/local/01-load-cust.aql
index 8d764c3..dd7d2a9 100644
--- a/asterix-app/src/test/resources/demo1112/local/01-load-cust.aql
+++ b/asterix-app/src/test/resources/demo1112/local/01-load-cust.aql
@@ -20,7 +20,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo1112/local/02-filter-cust.aql b/asterix-app/src/test/resources/demo1112/local/02-filter-cust.aql
index 24c5827..eb3f9a8 100644
--- a/asterix-app/src/test/resources/demo1112/local/02-filter-cust.aql
+++ b/asterix-app/src/test/resources/demo1112/local/02-filter-cust.aql
@@ -20,7 +20,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 write output to nc1:"/tmp/mycustomers.adm";
 
diff --git a/asterix-app/src/test/resources/demo1112/local/03-load-ord.aql b/asterix-app/src/test/resources/demo1112/local/03-load-ord.aql
index 7db4f4e..83657df 100644
--- a/asterix-app/src/test/resources/demo1112/local/03-load-ord.aql
+++ b/asterix-app/src/test/resources/demo1112/local/03-load-ord.aql
@@ -12,7 +12,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 load dataset Orders 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo1112/local/04-join-custord.aql b/asterix-app/src/test/resources/demo1112/local/04-join-custord.aql
index 1238e7e..fa82c9a 100644
--- a/asterix-app/src/test/resources/demo1112/local/04-join-custord.aql
+++ b/asterix-app/src/test/resources/demo1112/local/04-join-custord.aql
@@ -30,9 +30,9 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 write output to nc1:"/tmp/custorder.adm";
 
diff --git a/asterix-app/src/test/resources/demo1112/rainbow/01-load-cust.aql b/asterix-app/src/test/resources/demo1112/rainbow/01-load-cust.aql
index ccb959d..f8c34fb 100644
--- a/asterix-app/src/test/resources/demo1112/rainbow/01-load-cust.aql
+++ b/asterix-app/src/test/resources/demo1112/rainbow/01-load-cust.aql
@@ -20,7 +20,7 @@
 declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 load dataset Customers from rainbow-01:"/home/onose/demo-data/customerData.adm";
 // drop dataset Customers;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/demo1112/rainbow/02-filter-cust.aql b/asterix-app/src/test/resources/demo1112/rainbow/02-filter-cust.aql
index 2c08f34..2993b92 100644
--- a/asterix-app/src/test/resources/demo1112/rainbow/02-filter-cust.aql
+++ b/asterix-app/src/test/resources/demo1112/rainbow/02-filter-cust.aql
@@ -20,7 +20,7 @@
 declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 write output to rainbow-01:"/home/onose/hyracks-rainbow/results/mycustomers.adm";
 
diff --git a/asterix-app/src/test/resources/demo1112/rainbow/03-load-ord.aql b/asterix-app/src/test/resources/demo1112/rainbow/03-load-ord.aql
index 1329aa4..3134818 100644
--- a/asterix-app/src/test/resources/demo1112/rainbow/03-load-ord.aql
+++ b/asterix-app/src/test/resources/demo1112/rainbow/03-load-ord.aql
@@ -12,6 +12,6 @@
 declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
 
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 load dataset Orders from rainbow-01:"/home/onose/demo-data/orderData.adm";
diff --git a/asterix-app/src/test/resources/demo1112/rainbow/04-join-custord.aql b/asterix-app/src/test/resources/demo1112/rainbow/04-join-custord.aql
index 63c4670..18426d4 100644
--- a/asterix-app/src/test/resources/demo1112/rainbow/04-join-custord.aql
+++ b/asterix-app/src/test/resources/demo1112/rainbow/04-join-custord.aql
@@ -30,9 +30,9 @@
 declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 write output to rainbow-01:"/home/onose/hyracks-rainbow/results/custorder.adm";
 
diff --git a/asterix-app/src/test/resources/demo1112/rainbow/05-count-custord.aql b/asterix-app/src/test/resources/demo1112/rainbow/05-count-custord.aql
index dcbea09..f55ccbd 100644
--- a/asterix-app/src/test/resources/demo1112/rainbow/05-count-custord.aql
+++ b/asterix-app/src/test/resources/demo1112/rainbow/05-count-custord.aql
@@ -30,9 +30,9 @@
 declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 write output to rainbow-01:"/home/onose/hyracks-rainbow/results/custorder.adm";
 
diff --git a/asterix-app/src/test/resources/demo_aql/local/01-load-cust.aql b/asterix-app/src/test/resources/demo_aql/local/01-load-cust.aql
index cd7a682..0cbbf5b 100644
--- a/asterix-app/src/test/resources/demo_aql/local/01-load-cust.aql
+++ b/asterix-app/src/test/resources/demo_aql/local/01-load-cust.aql
@@ -18,7 +18,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo_aql/local/02-filter-cust.aql b/asterix-app/src/test/resources/demo_aql/local/02-filter-cust.aql
index 38171ed..f631afc 100644
--- a/asterix-app/src/test/resources/demo_aql/local/02-filter-cust.aql
+++ b/asterix-app/src/test/resources/demo_aql/local/02-filter-cust.aql
@@ -18,7 +18,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 write output to nc1:"/tmp/02-filter-cust.adm";
 
diff --git a/asterix-app/src/test/resources/demo_aql/local/03-count-cust-age.aql b/asterix-app/src/test/resources/demo_aql/local/03-count-cust-age.aql
index 37698a7..1bba3f6 100644
--- a/asterix-app/src/test/resources/demo_aql/local/03-count-cust-age.aql
+++ b/asterix-app/src/test/resources/demo_aql/local/03-count-cust-age.aql
@@ -18,7 +18,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 write output to nc1:"/tmp/03-count-cust-age.adm";
 
diff --git a/asterix-app/src/test/resources/demo_aql/local/04-load-ord.aql b/asterix-app/src/test/resources/demo_aql/local/04-load-ord.aql
index b8898f5..00f4f10 100644
--- a/asterix-app/src/test/resources/demo_aql/local/04-load-ord.aql
+++ b/asterix-app/src/test/resources/demo_aql/local/04-load-ord.aql
@@ -12,7 +12,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 load dataset Orders 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo_aql/local/05-count-param1.aql b/asterix-app/src/test/resources/demo_aql/local/05-count-param1.aql
index 02263c4..f7cc468 100644
--- a/asterix-app/src/test/resources/demo_aql/local/05-count-param1.aql
+++ b/asterix-app/src/test/resources/demo_aql/local/05-count-param1.aql
@@ -12,7 +12,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 write output to nc1:"/tmp/05-count-param1.adm";
 
diff --git a/asterix-app/src/test/resources/demo_aql/local/06-count-custord.aql b/asterix-app/src/test/resources/demo_aql/local/06-count-custord.aql
index 86e6ea0..3ff3a43 100644
--- a/asterix-app/src/test/resources/demo_aql/local/06-count-custord.aql
+++ b/asterix-app/src/test/resources/demo_aql/local/06-count-custord.aql
@@ -28,9 +28,9 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 write output to nc1:"/tmp/06-count-custord.adm";
 
diff --git a/asterix-app/src/test/resources/demo_aql/local/101-load-dblp.aql b/asterix-app/src/test/resources/demo_aql/local/101-load-dblp.aql
index 7d6dc62..03cf233 100644
--- a/asterix-app/src/test/resources/demo_aql/local/101-load-dblp.aql
+++ b/asterix-app/src/test/resources/demo_aql/local/101-load-dblp.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo_aql/local/102-fuzzy-select.aql b/asterix-app/src/test/resources/demo_aql/local/102-fuzzy-select.aql
index e37b49a..2cea3c5 100644
--- a/asterix-app/src/test/resources/demo_aql/local/102-fuzzy-select.aql
+++ b/asterix-app/src/test/resources/demo_aql/local/102-fuzzy-select.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
 
 write output to nc1:"/tmp/102-fuzzy-select.adm";
 
diff --git a/asterix-app/src/test/resources/demo_aql/local/110-self-join-dblp.aql b/asterix-app/src/test/resources/demo_aql/local/110-self-join-dblp.aql
index f51e455..64a65a5 100644
--- a/asterix-app/src/test/resources/demo_aql/local/110-self-join-dblp.aql
+++ b/asterix-app/src/test/resources/demo_aql/local/110-self-join-dblp.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 write output to nc1:"/tmp/110-self-join-dblp.adm";
 
diff --git a/asterix-app/src/test/resources/demo_aql/rainbow/01-load-cust.aql b/asterix-app/src/test/resources/demo_aql/rainbow/01-load-cust.aql
index e75bce2..143f55f 100644
--- a/asterix-app/src/test/resources/demo_aql/rainbow/01-load-cust.aql
+++ b/asterix-app/src/test/resources/demo_aql/rainbow/01-load-cust.aql
@@ -18,7 +18,7 @@
 declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 load dataset Customers from rainbow-01:"/home/onose/demo-data/semistructured/customer.adm";
 
diff --git a/asterix-app/src/test/resources/demo_aql/rainbow/02-filter-cust.aql b/asterix-app/src/test/resources/demo_aql/rainbow/02-filter-cust.aql
index 8af2368..a99e088 100644
--- a/asterix-app/src/test/resources/demo_aql/rainbow/02-filter-cust.aql
+++ b/asterix-app/src/test/resources/demo_aql/rainbow/02-filter-cust.aql
@@ -18,7 +18,7 @@
 declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 write output to rainbow-01:"/home/onose/hyracks-rainbow/results/02-filter-cust.adm";
 
diff --git a/asterix-app/src/test/resources/demo_aql/rainbow/03-count-cust-age.aql b/asterix-app/src/test/resources/demo_aql/rainbow/03-count-cust-age.aql
index eda4015..714c60a 100644
--- a/asterix-app/src/test/resources/demo_aql/rainbow/03-count-cust-age.aql
+++ b/asterix-app/src/test/resources/demo_aql/rainbow/03-count-cust-age.aql
@@ -18,7 +18,7 @@
 declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 write output to rainbow-01:"/home/onose/hyracks-rainbow/results/03-count-cust-age.adm";
 
diff --git a/asterix-app/src/test/resources/demo_aql/rainbow/04-load-ord.aql b/asterix-app/src/test/resources/demo_aql/rainbow/04-load-ord.aql
index bae97e8..be7a874 100644
--- a/asterix-app/src/test/resources/demo_aql/rainbow/04-load-ord.aql
+++ b/asterix-app/src/test/resources/demo_aql/rainbow/04-load-ord.aql
@@ -12,6 +12,6 @@
 declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
 
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 load dataset Orders from rainbow-01:"/home/onose/demo-data/semistructured/orders.adm";
diff --git a/asterix-app/src/test/resources/demo_aql/rainbow/05-count-param1.aql b/asterix-app/src/test/resources/demo_aql/rainbow/05-count-param1.aql
index f5ec449..0ea243a 100644
--- a/asterix-app/src/test/resources/demo_aql/rainbow/05-count-param1.aql
+++ b/asterix-app/src/test/resources/demo_aql/rainbow/05-count-param1.aql
@@ -12,7 +12,7 @@
 declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
 
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 write output to rainbow-01:"/home/onose/hyracks-rainbow/results/05-count-param1.adm";
 
diff --git a/asterix-app/src/test/resources/demo_aql/rainbow/06-count-custord.aql b/asterix-app/src/test/resources/demo_aql/rainbow/06-count-custord.aql
index ba5d5f4..2502d48a 100644
--- a/asterix-app/src/test/resources/demo_aql/rainbow/06-count-custord.aql
+++ b/asterix-app/src/test/resources/demo_aql/rainbow/06-count-custord.aql
@@ -29,9 +29,9 @@
 declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 write output to rainbow-01:"/home/onose/hyracks-rainbow/results/06-count-custord.adm";
 
diff --git a/asterix-app/src/test/resources/demo_aql/rainbow/101-load-dblp.aql b/asterix-app/src/test/resources/demo_aql/rainbow/101-load-dblp.aql
index 20f0b51..9ed4a85 100644
--- a/asterix-app/src/test/resources/demo_aql/rainbow/101-load-dblp.aql
+++ b/asterix-app/src/test/resources/demo_aql/rainbow/101-load-dblp.aql
@@ -12,7 +12,7 @@
 declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
 
 declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
 
 load dataset DBLP from
   rainbow-01:"/home/onose/demo-data/dblp-id.txt" delimited by ":";
diff --git a/asterix-app/src/test/resources/demo_aql/rainbow/102-fuzzy-select.aql b/asterix-app/src/test/resources/demo_aql/rainbow/102-fuzzy-select.aql
index c9f072a..af129cb 100644
--- a/asterix-app/src/test/resources/demo_aql/rainbow/102-fuzzy-select.aql
+++ b/asterix-app/src/test/resources/demo_aql/rainbow/102-fuzzy-select.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
 
 declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
 
 write output to rainbow-01:"/home/onose/hyracks-rainbow/results/102-fuzzy-select.adm";
 
diff --git a/asterix-app/src/test/resources/demo_aql/rainbow/110-self-join-dblp.aql b/asterix-app/src/test/resources/demo_aql/rainbow/110-self-join-dblp.aql
index c5b900b..37f06a0 100644
--- a/asterix-app/src/test/resources/demo_aql/rainbow/110-self-join-dblp.aql
+++ b/asterix-app/src/test/resources/demo_aql/rainbow/110-self-join-dblp.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
 
 declare dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 write output to rainbow-01:"/home/onose/hyracks-rainbow/results/110-self-join-dblp.adm";
 
diff --git a/asterix-app/src/test/resources/dmlts/scripts/enlist-scan-cust.aql b/asterix-app/src/test/resources/dmlts/scripts/enlist-scan-cust.aql
index ed44308..5bc029d 100644
--- a/asterix-app/src/test/resources/dmlts/scripts/enlist-scan-cust.aql
+++ b/asterix-app/src/test/resources/dmlts/scripts/enlist-scan-cust.aql
@@ -24,7 +24,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 enlist dataset Customers;
 
diff --git a/asterix-app/src/test/resources/dmlts/scripts/load-cust.aql b/asterix-app/src/test/resources/dmlts/scripts/load-cust.aql
index d55dfed..77d023e 100644
--- a/asterix-app/src/test/resources/dmlts/scripts/load-cust.aql
+++ b/asterix-app/src/test/resources/dmlts/scripts/load-cust.aql
@@ -24,7 +24,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-csx-small.aql b/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-csx-small.aql
index f7dcf0e..5e819bd 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-csx-small.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-csx-small.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset CSXSmall(CSXType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset CSXSmall 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-csx.aql b/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-csx.aql
index b5787a1..0b6da50 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-csx.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-csx.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset CSX(CSXType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset CSX 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-dblp.aql b/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-dblp.aql
index 3ca85bb..c726018f 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-dblp.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-dblp.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on nc1; 
 
 declare dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/fuzzyjoin/amerix/20-drop-dblp.aql b/asterix-app/src/test/resources/fuzzyjoin/amerix/20-drop-dblp.aql
index 868d534..66af9ce 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/amerix/20-drop-dblp.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/amerix/20-drop-dblp.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 drop dataset DBLP;
 
diff --git a/asterix-app/src/test/resources/fuzzyjoin/amerix/30-filter-dblp.aql b/asterix-app/src/test/resources/fuzzyjoin/amerix/30-filter-dblp.aql
index 45bd1c8..4a6b486 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/amerix/30-filter-dblp.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/amerix/30-filter-dblp.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 write output to nc1:"/tmp/amerix.adm";
 
diff --git a/asterix-app/src/test/resources/fuzzyjoin/amerix/40-self-join-dblp.aql b/asterix-app/src/test/resources/fuzzyjoin/amerix/40-self-join-dblp.aql
index bc024c3..60eafd3 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/amerix/40-self-join-dblp.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/amerix/40-self-join-dblp.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on nc1; 
 
 declare dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 write output to nc1:"/tmp/amerix.adm";
 
diff --git a/asterix-app/src/test/resources/fuzzyjoin/amerix/50-self-join-dblp.aql b/asterix-app/src/test/resources/fuzzyjoin/amerix/50-self-join-dblp.aql
index eb44be7..536b549 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/amerix/50-self-join-dblp.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/amerix/50-self-join-dblp.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 write output to nc1:"/tmp/amerix.adm";
 
diff --git a/asterix-app/src/test/resources/fuzzyjoin/dblp/000-1-char-at.aql b/asterix-app/src/test/resources/fuzzyjoin/dblp/000-1-char-at.aql
index 43a595d..182a17d 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/dblp/000-1-char-at.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/dblp/000-1-char-at.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 write output to nc1:"/tmp/dblp.adm";
 
diff --git a/asterix-app/src/test/resources/fuzzyjoin/dblp/010-load.aql b/asterix-app/src/test/resources/fuzzyjoin/dblp/010-load.aql
index a857935..ab93f83 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/dblp/010-load.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/dblp/010-load.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/fuzzyjoin/dblp/020-drop.aql b/asterix-app/src/test/resources/fuzzyjoin/dblp/020-drop.aql
index 65213dd..c82183b 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/dblp/020-drop.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/dblp/020-drop.aql
@@ -11,6 +11,6 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 drop dataset DBLP;
diff --git a/asterix-app/src/test/resources/fuzzyjoin/dblp/030-filter.aql b/asterix-app/src/test/resources/fuzzyjoin/dblp/030-filter.aql
index b539aba..2588e99 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/dblp/030-filter.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/dblp/030-filter.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 write output to nc1:'/tmp/dblp.adm';
 
diff --git a/asterix-app/src/test/resources/fuzzyjoin/dblp/040-self-join-aql.aql b/asterix-app/src/test/resources/fuzzyjoin/dblp/040-self-join-aql.aql
index 63e1a1e..4ac1a60 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/dblp/040-self-join-aql.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/dblp/040-self-join-aql.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 write output to nc1:'/tmp/dblp.adm';
 
diff --git a/asterix-app/src/test/resources/fuzzyjoin/dblp/050-self-join-op.aql b/asterix-app/src/test/resources/fuzzyjoin/dblp/050-self-join-op.aql
index f2467f2..e97df35 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/dblp/050-self-join-op.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/dblp/050-self-join-op.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 write output to nc1:'/tmp/dblp.adm';
 
diff --git a/asterix-app/src/test/resources/fuzzyjoin/pub/010-load-dblp.aql b/asterix-app/src/test/resources/fuzzyjoin/pub/010-load-dblp.aql
index 49d07ec..7ae0e30 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/pub/010-load-dblp.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/pub/010-load-dblp.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 // load dataset DBLP from nc1:'/asterix/asterix-app/data/pub-small/dblp-small-id.txt'
 load dataset DBLP 
diff --git a/asterix-app/src/test/resources/fuzzyjoin/pub/020-drop-dblp.aql b/asterix-app/src/test/resources/fuzzyjoin/pub/020-drop-dblp.aql
index 65213dd..c82183b 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/pub/020-drop-dblp.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/pub/020-drop-dblp.aql
@@ -11,6 +11,6 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 drop dataset DBLP;
diff --git a/asterix-app/src/test/resources/fuzzyjoin/pub/030-filter-dblp.aql b/asterix-app/src/test/resources/fuzzyjoin/pub/030-filter-dblp.aql
index d67d58a..7950d12 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/pub/030-filter-dblp.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/pub/030-filter-dblp.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 write output to nc1:'/tmp/pub.adm';
 
diff --git a/asterix-app/src/test/resources/fuzzyjoin/pub/040-load-csx.aql b/asterix-app/src/test/resources/fuzzyjoin/pub/040-load-csx.aql
index 06dfb4e..a87398d 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/pub/040-load-csx.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/pub/040-load-csx.aql
@@ -11,7 +11,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset CSX(CSXType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 // load dataset CSX from nc1:'/asterix/asterix-app/data/pub-small/csx-small-id.txt'
 load dataset  CSX
diff --git a/asterix-app/src/test/resources/fuzzyjoin/pub/050-drop-csx.aql b/asterix-app/src/test/resources/fuzzyjoin/pub/050-drop-csx.aql
index c37c8b5..62e53ca 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/pub/050-drop-csx.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/pub/050-drop-csx.aql
@@ -11,6 +11,6 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset CSX(CSXType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 drop dataset CSX;
diff --git a/asterix-app/src/test/resources/fuzzyjoin/pub/060-filter-csx.aql b/asterix-app/src/test/resources/fuzzyjoin/pub/060-filter-csx.aql
index 56b6dc9..daf53d3 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/pub/060-filter-csx.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/pub/060-filter-csx.aql
@@ -13,7 +13,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset CSX(CSXType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 write output to nc1:'/tmp/pub.adm';
 
diff --git a/asterix-app/src/test/resources/fuzzyjoin/pub/070-join-aql.aql b/asterix-app/src/test/resources/fuzzyjoin/pub/070-join-aql.aql
index ec88679..a72fa03 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/pub/070-join-aql.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/pub/070-join-aql.aql
@@ -19,10 +19,10 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 declare dataset CSX(CSXType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 write output to nc1:'/tmp/pub.adm';
 
diff --git a/asterix-app/src/test/resources/fuzzyjoin/pub/080-join-op.aql b/asterix-app/src/test/resources/fuzzyjoin/pub/080-join-op.aql
index 032031f..0466033 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/pub/080-join-op.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/pub/080-join-op.aql
@@ -19,10 +19,10 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 declare dataset CSX(CSXType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 write output to nc1:'/tmp/pub.adm';
 
diff --git a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/010-load-users.aql b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/010-load-users.aql
index be75fa6..affd0f3 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/010-load-users.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/010-load-users.aql
@@ -10,7 +10,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Users(UserType) 
-  partitioned by key uid on group1;
+  primary key uid on group1;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/020-drop-users.aql b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/020-drop-users.aql
index 86525a7..71df5ae 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/020-drop-users.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/020-drop-users.aql
@@ -10,6 +10,6 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Users(UserType) 
-  partitioned by key uid on group1;
+  primary key uid on group1;
 
 drop dataset Users;
diff --git a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/030-filter-users.aql b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/030-filter-users.aql
index 298c0b1..4e6846f 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/030-filter-users.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/030-filter-users.aql
@@ -10,7 +10,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Users(UserType) 
-  partitioned by key uid on group1;
+  primary key uid on group1;
 
 write output to nc1:'/tmp/users.adm';
 
diff --git a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/040-load-visitors.aql b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/040-load-visitors.aql
index a75989a..0e07e69 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/040-load-visitors.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/040-load-visitors.aql
@@ -10,7 +10,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Visitors(VisitorType) 
-  partitioned by key vid on group1;
+  primary key vid on group1;
 
 load dataset Visitors 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/050-drop-visitors.aql b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/050-drop-visitors.aql
index 15047b8..3a8e725 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/050-drop-visitors.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/050-drop-visitors.aql
@@ -10,6 +10,6 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Visitors(VisitorType) 
-  partitioned by key vid on group1;
+  primary key vid on group1;
 
 drop dataset Visitors;
diff --git a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/060-fililter-visitors.aql b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/060-fililter-visitors.aql
index dae180e..1fb0a4c 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/060-fililter-visitors.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/060-fililter-visitors.aql
@@ -10,7 +10,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Visitors(VisitorType) 
-  partitioned by key vid on group1;
+  primary key vid on group1;
 
 write output to nc1:'/tmp/visitors.adm';
 
diff --git a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/070-join-aql-lottery_numbers.aql b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/070-join-aql-lottery_numbers.aql
index aaa145f..c5a625a 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/070-join-aql-lottery_numbers.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/070-join-aql-lottery_numbers.aql
@@ -17,9 +17,9 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Users(UserType) 
-  partitioned by key uid on group1;
+  primary key uid on group1;
 declare dataset Visitors(VisitorType) 
-  partitioned by key vid on group1;
+  primary key vid on group1;
 
 write output to nc1:'/tmp/users-visitors.adm';
 
diff --git a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/080-join-op-lottery_numbers.aql b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/080-join-op-lottery_numbers.aql
index ca5bebb..4275067 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/080-join-op-lottery_numbers.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/080-join-op-lottery_numbers.aql
@@ -17,9 +17,9 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Users(UserType) 
-  partitioned by key uid on group1;
+  primary key uid on group1;
 declare dataset Visitors(VisitorType) 
-  partitioned by key vid on group1;
+  primary key vid on group1;
 
 write output to nc1:'/tmp/users-visitors.adm';
 
diff --git a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/090-join-op-interests.aql b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/090-join-op-interests.aql
index a7cf0a4..856e02f 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/090-join-op-interests.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/090-join-op-interests.aql
@@ -17,9 +17,9 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Users(UserType) 
-  partitioned by key uid on group1;
+  primary key uid on group1;
 declare dataset Visitors(VisitorType) 
-  partitioned by key vid on group1;
+  primary key vid on group1;
 
 write output to nc1:'/tmp/users-visitors.adm';
 
diff --git a/asterix-app/src/test/resources/integration/queries/dataset-scan.aql b/asterix-app/src/test/resources/integration/queries/dataset-scan.aql
index 1930be2..f7558c5 100644
--- a/asterix-app/src/test/resources/integration/queries/dataset-scan.aql
+++ b/asterix-app/src/test/resources/integration/queries/dataset-scan.aql
@@ -20,7 +20,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 for $c in dataset('Customers')
 return $c
diff --git a/asterix-app/src/test/resources/integration/updates/load-dataset.aql b/asterix-app/src/test/resources/integration/updates/load-dataset.aql
index c6faa6e..4041450 100644
--- a/asterix-app/src/test/resources/integration/updates/load-dataset.aql
+++ b/asterix-app/src/test/resources/integration/updates/load-dataset.aql
@@ -15,7 +15,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/metadata-transactions/init-state-queries/customers_orders.aql b/asterix-app/src/test/resources/metadata-transactions/init-state-queries/customers_orders.aql
index af15a23..87aa2fc 100644
--- a/asterix-app/src/test/resources/metadata-transactions/init-state-queries/customers_orders.aql
+++ b/asterix-app/src/test/resources/metadata-transactions/init-state-queries/customers_orders.aql
@@ -44,10 +44,10 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Customers(CustomerType)
-  partitioned by key cid, name on group1;
+  primary key cid, name on group1;
 
 create dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 create index ordCustId on Orders(cid);
 
diff --git a/asterix-app/src/test/resources/metadata-transactions/queries/create_duplicate_dataset.aql b/asterix-app/src/test/resources/metadata-transactions/queries/create_duplicate_dataset.aql
index 047eddf..b5c3c28 100644
--- a/asterix-app/src/test/resources/metadata-transactions/queries/create_duplicate_dataset.aql
+++ b/asterix-app/src/test/resources/metadata-transactions/queries/create_duplicate_dataset.aql
@@ -1,4 +1,4 @@
 use dataverse custord;
 
 create dataset Customers(CustomerType)
-  partitioned by key cid, name on group1;
+  primary key cid, name on group1;
diff --git a/asterix-app/src/test/resources/metadata-transactions/queries/rollback_new_dataset.aql b/asterix-app/src/test/resources/metadata-transactions/queries/rollback_new_dataset.aql
index aa76ae2..a9ad490 100644
--- a/asterix-app/src/test/resources/metadata-transactions/queries/rollback_new_dataset.aql
+++ b/asterix-app/src/test/resources/metadata-transactions/queries/rollback_new_dataset.aql
@@ -2,7 +2,7 @@
 
 // Creating this dataset should succeed.
 create dataset NewDataset(CustomerType)
-  partitioned by key cid, name on group1;
+  primary key cid, name on group1;
 
 
 // Creating this duplicate type should fail, and rollback should remove the new dataverse.
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_1.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_1.aql
new file mode 100644
index 0000000..de0e870
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_1.aql
@@ -0,0 +1,25 @@
+/*
+ * Description  : create a dataset providing a hint, with extra whitespace inside the hints clause
+ * Expected Res : Success
+ * Date         : 29 Jan 2013
+ * Issue        : 251
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as open {
+  id:int32,
+  text: string
+}
+
+write output to nc1:"mdtest/basic_issue_251_dataset_hint_1.adm";
+
+create dataset Book(LineType)
+primary key id
+hints(  cardinality  =   2000);
+
+for $x in dataset('Metadata.Dataset')
+where $x.DataverseName='test'
+return $x
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_2.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_2.aql
new file mode 100644
index 0000000..27d2c2c
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_2.aql
@@ -0,0 +1,25 @@
+/*
+ * Description  : create a dataset providing a valid hint without any whitespace
+ * Expected Res : Success
+ * Date         : 29 Jan 2013
+ * Issue        : 251
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as open {
+  id:int32,
+  text: string
+}
+
+write output to nc1:"mdtest/basic_issue_251_dataset_hint_2.adm";
+
+create dataset Book(LineType)
+primary key id
+hints(cardinality=2000);
+
+for $x in dataset('Metadata.Dataset')
+where $x.DataverseName='test'
+return $x
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_3.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_3.aql
new file mode 100644
index 0000000..b6a730f
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_3.aql
@@ -0,0 +1,25 @@
+/*
+ * Description  : create a dataset providing a hint (in upper case)
+ * Expected Res : Success
+ * Date         : 29 Jan 2013
+ * Issue        : 251
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as open {
+  id:int32,
+  text: string
+}
+
+write output to nc1:"mdtest/basic_issue_251_dataset_hint_3.adm";
+
+create dataset Book(LineType)
+primary key id
+hints(CARDINALITY=2000);
+
+for $x in dataset('Metadata.Dataset')
+where $x.DataverseName='test'
+return $x
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_4.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_4.aql
new file mode 100644
index 0000000..af7077a
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_4.aql
@@ -0,0 +1,24 @@
+/*
+ * Description  : create a dataset without providing any hints.
+ * Expected Res : Success
+ * Date         : 29 Jan 2013
+ * Issue        : 251
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as open {
+  id:int32,
+  text: string
+}
+
+write output to nc1:"mdtest/basic_issue_251_dataset_hint_4.adm";
+
+create dataset Book(LineType)
+primary key id;
+
+for $x in dataset('Metadata.Dataset')
+where $x.DataverseName='test'
+return $x
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta02.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta02.aql
index 8bfc240..7778e99 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta02.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta02.aql
@@ -13,7 +13,7 @@
 id : int32
 }
 
-create dataset testdv.dst01(testtype) partitioned by key id;
+create dataset testdv.dst01(testtype) primary key id;
 
 for $l in dataset('Metadata.Dataset')
 where $l.DataverseName = 'testdv' and $l.DatasetName = 'dst01'
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta05.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta05.aql
index 9eb129d..201ab6e 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta05.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta05.aql
@@ -14,7 +14,7 @@
 name : string
 }
 
-create dataset testdv.t1(testtype) partitioned by key id;
+create dataset testdv.t1(testtype) primary key id;
 
 create index idx1 on testdv.t1(name);
 
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta09.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta09.aql
index 28a3794..56c6c7a 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta09.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta09.aql
@@ -13,7 +13,7 @@
 id:int32
 }
 
-create dataset test.t1(testtype) partitioned by key id;
+create dataset test.t1(testtype) primary key id;
 
 insert into dataset test.t1({"id":123});
 insert into dataset test.t1({"id":133});
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta11.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta11.aql
index d781114..0e02802 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta11.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta11.aql
@@ -13,7 +13,7 @@
 id : int32
 }
 
-create dataset test.dst01(testtype) partitioned by key id;
+create dataset test.dst01(testtype) primary key id;
 
 drop dataset test.dst01;
 
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta12.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta12.aql
index 1de7ac5..82e2eb0 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta12.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta12.aql
@@ -14,7 +14,7 @@
 name : string
 }
 
-create dataset test.dst01(testtype) partitioned by key id;
+create dataset test.dst01(testtype) primary key id;
 
 create index idx1 on test.dst01(name);
 
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_251_dataset_hint_error_1.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_251_dataset_hint_error_1.aql
new file mode 100644
index 0000000..eb3f3dc
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_251_dataset_hint_error_1.aql
@@ -0,0 +1,19 @@
+/*
+ * Description  : create a dataset providing invalid hints
+ * Expected Res : Failure
+ * Date         : 29 Jan 2013
+ * Issue        : 251
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as open {
+  id:int32,
+  text: string
+}
+
+create dataset Book(LineType)
+primary key id
+hints(size=2000,tuple_size=100);
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_251_dataset_hint_error_2.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_251_dataset_hint_error_2.aql
new file mode 100644
index 0000000..830370e
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_251_dataset_hint_error_2.aql
@@ -0,0 +1,24 @@
+/*
+ * Description  : create a dataset providing an invalid value for a hint and an unknown hint
+ * Expected Res : Failure
+ * Date         : 29 Jan 2013
+ * Issue        : 251
+ */
+
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as open {
+  id:int32,
+  text: string
+}
+
+create dataset Book(LineType)
+primary key id
+hints(cardinality="-20jh0",size=45);
+
+for $x in dataset('Metadata.Dataset')
+where $x.DataverseName='test'
+return $x
diff --git a/asterix-app/src/test/resources/metadata/queries/transaction/failure_subsequent_no_execution.aql b/asterix-app/src/test/resources/metadata/queries/transaction/failure_subsequent_no_execution.aql
index d4678f2..19b87a2 100644
--- a/asterix-app/src/test/resources/metadata/queries/transaction/failure_subsequent_no_execution.aql
+++ b/asterix-app/src/test/resources/metadata/queries/transaction/failure_subsequent_no_execution.aql
@@ -37,7 +37,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Customers(CustomerType)
-  partitioned by key cid, name on group1;
+  primary key cid, name on group1;
 
 create nodegroup group1 on nc1, nc2;
 
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_2.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_2.adm
new file mode 100644
index 0000000..7c82b18
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_2.adm
@@ -0,0 +1 @@
+{ "DataverseName": "test", "DatasetName": "Book", "DataTypeName": "LineType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ { "Name": "CARDINALITY", "Value": "2000" } }}, "Timestamp": "Tue Jan 29 19:11:26 PST 2013" }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_3.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_3.adm
new file mode 100644
index 0000000..f931b40
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_3.adm
@@ -0,0 +1 @@
+{ "DataverseName": "test", "DatasetName": "Book", "DataTypeName": "LineType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ { "Name": "CARDINALITY", "Value": "2000" } }}, "Timestamp": "Tue Jan 29 19:00:38 PST 2013" }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_4.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_4.adm
new file mode 100644
index 0000000..efd3a7e
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_4.adm
@@ -0,0 +1 @@
+{ "DataverseName": "test", "DatasetName": "Book", "DataTypeName": "LineType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 18:59:57 PST 2013" }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta02.adm b/asterix-app/src/test/resources/metadata/results/basic/meta02.adm
index 2424676..394af5c 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta02.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta02.adm
@@ -1 +1 @@
-{ "DataverseName": "testdv", "DatasetName": "dst01", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Sat Sep 15 14:44:58 PDT 2012" }
+{ "DataverseName": "testdv", "DatasetName": "dst01", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 18:54:34 PST 2013" }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta09.adm b/asterix-app/src/test/resources/metadata/results/basic/meta09.adm
index b85737d..9bcb2a4 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta09.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta09.adm
@@ -1 +1 @@
-{ "DataverseName": "test", "DatasetName": "t1", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Sat Nov 24 14:28:44 PST 2012" }
+{ "DataverseName": "test", "DatasetName": "t1", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 18:55:25 PST 2013" }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta16.adm b/asterix-app/src/test/resources/metadata/results/basic/meta16.adm
index 8abc339..b56fe7c 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta16.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta16.adm
@@ -1,8 +1,8 @@
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName" ], "PrimaryKey": [ "DataverseName", "DatasetName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DataTypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatatypeName" ], "PrimaryKey": [ "DataverseName", "DatatypeName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "DataTypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName" ], "PrimaryKey": [ "DataverseName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Function", "DataTypeName": "FunctionRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name", "Arity" ], "PrimaryKey": [ "DataverseName", "Name", "Arity" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Index", "DataTypeName": "IndexRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName", "IndexName" ], "PrimaryKey": [ "DataverseName", "DatasetName", "IndexName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Node", "DataTypeName": "NodeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "NodeName" ], "PrimaryKey": [ "NodeName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "DataTypeName": "NodeGroupRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "GroupName" ], "PrimaryKey": [ "GroupName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName" ], "PrimaryKey": [ "DataverseName", "DatasetName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DataTypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatatypeName" ], "PrimaryKey": [ "DataverseName", "DatatypeName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "DataTypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName" ], "PrimaryKey": [ "DataverseName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Function", "DataTypeName": "FunctionRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name", "Arity" ], "PrimaryKey": [ "DataverseName", "Name", "Arity" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Index", "DataTypeName": "IndexRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName", "IndexName" ], "PrimaryKey": [ "DataverseName", "DatasetName", "IndexName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Node", "DataTypeName": "NodeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "NodeName" ], "PrimaryKey": [ "NodeName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "DataTypeName": "NodeGroupRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "GroupName" ], "PrimaryKey": [ "GroupName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta17.adm b/asterix-app/src/test/resources/metadata/results/basic/meta17.adm
index 8a56248..2de89a6 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta17.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta17.adm
@@ -1,57 +1,60 @@
-{ "DataverseName": "Metadata", "DatatypeName": "DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "DataTypeName", "FieldType": "string" }, { "FieldName": "DatasetType", "FieldType": "string" }, { "FieldName": "InternalDetails", "FieldType": "Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "ExternalDetails", "FieldType": "Field_ExternalDetails_in_DatasetRecordType" }, { "FieldName": "FeedDetails", "FieldType": "Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "DatasourceAdapterRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" }, { "FieldName": "Type", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatatypeName", "FieldType": "string" }, { "FieldName": "Derived", "FieldType": "Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "DataverseRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DataFormat", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FieldName", "FieldType": "string" }, { "FieldName": "FieldType", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Function_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_NodeNames_in_NodeGroupRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "string", "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Params_in_FunctionRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_SearchKey_in_IndexRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "FunctionRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Arity", "FieldType": "string" }, { "FieldName": "Params", "FieldType": "Field_Params_in_FunctionRecordType" }, { "FieldName": "ReturnType", "FieldType": "string" }, { "FieldName": "Definition", "FieldType": "string" }, { "FieldName": "Language", "FieldType": "string" }, { "FieldName": "Kind", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "IndexRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "IndexName", "FieldType": "string" }, { "FieldName": "IndexStructure", "FieldType": "string" }, { "FieldName": "SearchKey", "FieldType": "Field_SearchKey_in_IndexRecordType" }, { "FieldName": "IsPrimary", "FieldType": "boolean" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "NodeGroupRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "NodeNames", "FieldType": "Field_NodeNames_in_NodeGroupRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "NodeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "NodeName", "FieldType": "string" }, { "FieldName": "NumberOfCores", "FieldType": "int32" }, { "FieldName": "WorkingMemorySize", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Tag", "FieldType": "string" }, { "FieldName": "IsAnonymous", "FieldType": "boolean" }, { "FieldName": "EnumValues", "FieldType": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Record", "FieldType": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Union", "FieldType": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "UnorderedList", "FieldType": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "OrderedList", "FieldType": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Function", "FieldType": "Field_Function_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Status", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "IsOpen", "FieldType": "boolean" }, { "FieldName": "Fields", "FieldType": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "boolean", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "circle", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "date", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "datetime", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "double", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "duration", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "float", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int16", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int32", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int64", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int8", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "line", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "null", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "point", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "point3d", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "polygon", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "rectangle", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "string", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "time", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "DataTypeName", "FieldType": "string" }, { "FieldName": "DatasetType", "FieldType": "string" }, { "FieldName": "InternalDetails", "FieldType": "Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "ExternalDetails", "FieldType": "Field_ExternalDetails_in_DatasetRecordType" }, { "FieldName": "FeedDetails", "FieldType": "Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Hints", "FieldType": "Field_Hints_in_DatasetRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatasourceAdapterRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" }, { "FieldName": "Type", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:30 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatatypeName", "FieldType": "string" }, { "FieldName": "Derived", "FieldType": "Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DataverseRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DataFormat", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FieldName", "FieldType": "string" }, { "FieldName": "FieldType", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Function_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Hints_in_DatasetRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_Hints_in_DatasetRecordType_ItemType", "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Hints_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_NodeNames_in_NodeGroupRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "string", "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Params_in_FunctionRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:30 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_SearchKey_in_IndexRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "FunctionRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Arity", "FieldType": "string" }, { "FieldName": "Params", "FieldType": "Field_Params_in_FunctionRecordType" }, { "FieldName": "ReturnType", "FieldType": "string" }, { "FieldName": "Definition", "FieldType": "string" }, { "FieldName": "Language", "FieldType": "string" }, { "FieldName": "Kind", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:30 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "IndexRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "IndexName", "FieldType": "string" }, { "FieldName": "IndexStructure", "FieldType": "string" }, { "FieldName": "SearchKey", "FieldType": "Field_SearchKey_in_IndexRecordType" }, { "FieldName": "IsPrimary", "FieldType": "boolean" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "NodeGroupRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "NodeNames", "FieldType": "Field_NodeNames_in_NodeGroupRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "NodeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "NodeName", "FieldType": "string" }, { "FieldName": "NumberOfCores", "FieldType": "int32" }, { "FieldName": "WorkingMemorySize", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Tag", "FieldType": "string" }, { "FieldName": "IsAnonymous", "FieldType": "boolean" }, { "FieldName": "EnumValues", "FieldType": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Record", "FieldType": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Union", "FieldType": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "UnorderedList", "FieldType": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "OrderedList", "FieldType": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Function", "FieldType": "Field_Function_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Status", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "IsOpen", "FieldType": "boolean" }, { "FieldName": "Fields", "FieldType": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "boolean", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "circle", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "date", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "datetime", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "double", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "duration", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "float", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int16", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int32", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int64", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int8", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "interval", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "line", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "null", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "point", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "point3d", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "polygon", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "rectangle", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "string", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "time", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
diff --git a/asterix-app/src/test/resources/metadata/testsuite.xml b/asterix-app/src/test/resources/metadata/testsuite.xml
index b1b303e..6494cbf 100644
--- a/asterix-app/src/test/resources/metadata/testsuite.xml
+++ b/asterix-app/src/test/resources/metadata/testsuite.xml
@@ -105,6 +105,21 @@
         <output-file compare="Text">meta21.adm</output-file>
       </compilation-unit>
     </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="issue_251_dataset_hint_2">
+        <output-file compare="Text">issue_251_dataset_hint_2.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="issue_251_dataset_hint_3">
+        <output-file compare="Text">issue_251_dataset_hint_3.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="issue_251_dataset_hint_4">
+        <output-file compare="Text">issue_251_dataset_hint_4.adm</output-file>
+      </compilation-unit>
+    </test-case>
   </test-group>
   <test-group name="exception">
     <test-case FilePath="exception">
@@ -155,6 +170,18 @@
         <expected-error>MetadataException</expected-error>
       </compilation-unit>
     </test-case>
+    <test-case FilePath="exception">
+      <compilation-unit name="issue_251_dataset_hint_error_1">
+        <output-file compare="Text">none.adm</output-file>
+        <expected-error>AsterixException</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="exception">
+      <compilation-unit name="issue_251_dataset_hint_error_2">
+        <output-file compare="Text">none.adm</output-file>
+        <expected-error>AsterixException</expected-error>
+      </compilation-unit>
+    </test-case>
   </test-group>
   <test-group name="transaction">
     <test-case FilePath="transaction">
diff --git a/asterix-app/src/test/resources/misc/split01.aql b/asterix-app/src/test/resources/misc/split01.aql
index 46b2403..de9ec5c 100644
--- a/asterix-app/src/test/resources/misc/split01.aql
+++ b/asterix-app/src/test/resources/misc/split01.aql
@@ -30,9 +30,9 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 write output to nc1:"/tmp/split01.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/all-drop.aql b/asterix-app/src/test/resources/nontagged/custord/local/all-drop.aql
index 13a0414..0672b32 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/all-drop.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/all-drop.aql
@@ -36,6 +36,6 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset All(ExampleType)
-  partitioned by key id on group1;
+  primary key id on group1;
 
 drop dataset All;
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/all-load.aql b/asterix-app/src/test/resources/nontagged/custord/local/all-load.aql
index 73fa096..745562d 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/all-load.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/all-load.aql
@@ -36,7 +36,7 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset All(ExampleType)
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset All from nc1:"/home/yasser/Dropbox/Research/data/allData.json";
 
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/all-scan.aql b/asterix-app/src/test/resources/nontagged/custord/local/all-scan.aql
index 7006eb4..1bd4b73 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/all-scan.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/all-scan.aql
@@ -36,7 +36,7 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset All(ExampleType)
-  partitioned by key id on group1;
+  primary key id on group1;
   
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_all_3.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/cust-drop.aql b/asterix-app/src/test/resources/nontagged/custord/local/cust-drop.aql
index b6a1913..23aeea8 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/cust-drop.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/cust-drop.aql
@@ -23,6 +23,6 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 drop dataset Customers;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/cust-load.aql b/asterix-app/src/test/resources/nontagged/custord/local/cust-load.aql
index fb57584..4af623a 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/cust-load.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/cust-load.aql
@@ -17,7 +17,7 @@
 declare nodegroup group1 on nc1;
 
 declare dataset Dataverse(DataverseType)
-  partitioned by key dataverseName on group1;        
+  primary key dataverseName on group1;        
       
 for $c in dataset('Dataverse')
 return $c
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/cust-q1.aql b/asterix-app/src/test/resources/nontagged/custord/local/cust-q1.aql
index bc7ac1f..bc1c623 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/cust-q1.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/cust-q1.aql
@@ -23,7 +23,7 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_cust_q1.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/cust-scan.aql b/asterix-app/src/test/resources/nontagged/custord/local/cust-scan.aql
index a54197f..cf8aacf 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/cust-scan.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/cust-scan.aql
@@ -23,7 +23,7 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_cust_3.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/emp-drop.aql b/asterix-app/src/test/resources/nontagged/custord/local/emp-drop.aql
index 22230c1..0499a72 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/emp-drop.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/emp-drop.aql
@@ -24,6 +24,6 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Emp(EmpType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 drop dataset Emp;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/emp-load.aql b/asterix-app/src/test/resources/nontagged/custord/local/emp-load.aql
index c0cbfc3..e676ef6 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/emp-load.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/emp-load.aql
@@ -20,6 +20,6 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Emp(EmpType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 load dataset Emp from nc1:"/home/yasser/Dropbox/Research/data/EmpData.json";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/emp-q1.aql b/asterix-app/src/test/resources/nontagged/custord/local/emp-q1.aql
index 20b83f7..ef8ce90 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/emp-q1.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/emp-q1.aql
@@ -19,7 +19,7 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Emp(EmpType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_emp_q1.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/emp-scan.aql b/asterix-app/src/test/resources/nontagged/custord/local/emp-scan.aql
index df903bf..e69b361 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/emp-scan.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/emp-scan.aql
@@ -20,7 +20,7 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Emp(EmpType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_emp_3.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/join-01.aql b/asterix-app/src/test/resources/nontagged/custord/local/join-01.aql
index d5a32f5..587fa4d 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/join-01.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/join-01.aql
@@ -34,9 +34,9 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_join_1.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/join-02.aql b/asterix-app/src/test/resources/nontagged/custord/local/join-02.aql
index 9b5caf1..ffcc6a0 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/join-02.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/join-02.aql
@@ -34,9 +34,9 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_join_2.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/join-03.aql b/asterix-app/src/test/resources/nontagged/custord/local/join-03.aql
index bff863c..a953d76 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/join-03.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/join-03.aql
@@ -34,9 +34,9 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_join_3.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/numeric-drop.aql b/asterix-app/src/test/resources/nontagged/custord/local/numeric-drop.aql
index b82bf84..a576214 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/numeric-drop.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/numeric-drop.aql
@@ -15,6 +15,6 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Numeric(ExampleType)
-  partitioned by key id on group1;
+  primary key id on group1;
   
 drop dataset Numeric;
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/numeric-load.aql b/asterix-app/src/test/resources/nontagged/custord/local/numeric-load.aql
index 5a7f162..491b391 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/numeric-load.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/numeric-load.aql
@@ -16,7 +16,7 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Numeric(ExampleType)
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset Numeric from nc1:"/home/yasser/Dropbox/Research/data/numericData.json";
 
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/numeric-scan.aql b/asterix-app/src/test/resources/nontagged/custord/local/numeric-scan.aql
index ec60095..c5bdb34 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/numeric-scan.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/numeric-scan.aql
@@ -15,7 +15,7 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Numeric(ExampleType)
-  partitioned by key id on group1;
+  primary key id on group1;
   
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_numeric_3.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/ord-drop.aql b/asterix-app/src/test/resources/nontagged/custord/local/ord-drop.aql
index fd68e70..d4fa668 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/ord-drop.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/ord-drop.aql
@@ -15,6 +15,6 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 drop dataset Orders;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/ord-load.aql b/asterix-app/src/test/resources/nontagged/custord/local/ord-load.aql
index fe1d61d..81d25bc 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/ord-load.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/ord-load.aql
@@ -15,6 +15,6 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 load dataset Orders from nc1:"/home/yasser/Dropbox/Research/data/orderData.json";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/ord-q1.aql b/asterix-app/src/test/resources/nontagged/custord/local/ord-q1.aql
index 97aa315..d008979 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/ord-q1.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/ord-q1.aql
@@ -15,7 +15,7 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_ord_q1.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/ord-scan.aql b/asterix-app/src/test/resources/nontagged/custord/local/ord-scan.aql
index e7c5236..9fc0faf 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/ord-scan.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/ord-scan.aql
@@ -15,7 +15,7 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_ord_3.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/spatial-drop.aql b/asterix-app/src/test/resources/nontagged/custord/local/spatial-drop.aql
index 5e7b515..304ec7c 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/spatial-drop.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/spatial-drop.aql
@@ -14,6 +14,6 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Spatial(ExampleType)
-  partitioned by key id on group1;
+  primary key id on group1;
 
 drop dataset Spatial;
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/spatial-load.aql b/asterix-app/src/test/resources/nontagged/custord/local/spatial-load.aql
index b36808a..a0e7bae 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/spatial-load.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/spatial-load.aql
@@ -14,7 +14,7 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Spatial(ExampleType)
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset Spatial from nc1:"/home/yasser/Dropbox/Research/data/spatialData.json";
 
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/spatial-scan.aql b/asterix-app/src/test/resources/nontagged/custord/local/spatial-scan.aql
index e4faa16..5027c29 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/spatial-scan.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/spatial-scan.aql
@@ -16,7 +16,7 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Spatial(ExampleType)
-  partitioned by key id on group1;
+  primary key id on group1;
   
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_spatial_3.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/temp-drop.aql b/asterix-app/src/test/resources/nontagged/custord/local/temp-drop.aql
index 80a86ac..df7e293 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/temp-drop.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/temp-drop.aql
@@ -13,6 +13,6 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Temp(ExampleType)
-  partitioned by key id on group1;
+  primary key id on group1;
 
 drop dataset Temp;
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/temp-load.aql b/asterix-app/src/test/resources/nontagged/custord/local/temp-load.aql
index 0400db1..d75168a 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/temp-load.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/temp-load.aql
@@ -14,7 +14,7 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Temp(ExampleType)
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset Temp from nc1:"/home/yasser/Dropbox/Research/data/tempData.json";
 
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/temp-scan.aql b/asterix-app/src/test/resources/nontagged/custord/local/temp-scan.aql
index 8dc3cfa..7a369e6 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/temp-scan.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/temp-scan.aql
@@ -14,7 +14,7 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset Temp(ExampleType)
-  partitioned by key id on group1;
+  primary key id on group1;
   
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_temp_3.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/alltables-loadAsOpen.aql b/asterix-app/src/test/resources/nontagged/tpch/local/alltables-loadAsOpen.aql
index 7847546..0c4a5b3 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/alltables-loadAsOpen.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/alltables-loadAsOpen.aql
@@ -39,21 +39,21 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 //load dataset LineItems from nc1:"/home/yasser/Dropbox/Research/data/tpch_data/alldata/jsonformat/lineitem.json" pre-sorted;
 //load dataset Orders from nc1:"/home/yasser/Dropbox/Research/data/tpch_data/alldata/jsonformat/orders.json" pre-sorted;
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/alltablesAsopen-scan.aql b/asterix-app/src/test/resources/nontagged/tpch/local/alltablesAsopen-scan.aql
index cd32ae0..c147235 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/alltablesAsopen-scan.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/alltablesAsopen-scan.aql
@@ -79,21 +79,21 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 write output to nc1:"/home/yasser/Desktop/result_scan.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-drop.aql b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-drop.aql
index e8c56c1..a30757c 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-drop.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-drop.aql
@@ -90,21 +90,21 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 drop dataset LineItems;
 //drop dataset Orders;
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-load.aql b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-load.aql
index 9e05370..0bd1a39 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-load.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-load.aql
@@ -90,21 +90,21 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 //load dataset LineItems from nc1:"/home/yasser/Dropbox/Research/data/tpch_data/alldata/jsonformat/lineitem.json" pre-sorted;
 //load dataset Orders from nc1:"/home/yasser/Dropbox/Research/data/tpch_data/alldata/jsonformat/orders.json" pre-sorted;
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q1.aql b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q1.aql
index 441ac43..f08d390 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q1.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q1.aql
@@ -24,7 +24,7 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_tpch_closed_1.adm";
  
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q3.aql b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q3.aql
index e002ad9..6933881 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q3.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q3.aql
@@ -47,11 +47,11 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_tpch_closed_3.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q5.aql b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q5.aql
index ceca8cd..d605099 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q5.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q5.aql
@@ -70,17 +70,17 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
   
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_tpch_closed_5.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q9.aql b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q9.aql
index 2586561..acf2ae3 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q9.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q9.aql
@@ -74,17 +74,17 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
   
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_tpch_closed_9.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-scan.aql b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-scan.aql
index f4f50b1..4b97d2e 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-scan.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-scan.aql
@@ -90,21 +90,21 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 write output to nc1:"/home/yasser/Desktop/result_scan.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-drop.aql b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-drop.aql
index 06d352c..61b6d85 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-drop.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-drop.aql
@@ -39,21 +39,21 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 drop dataset LineItems;
 //drop dataset Orders;
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-load.aql b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-load.aql
index 4241366..fdcf15c 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-load.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-load.aql
@@ -39,21 +39,21 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 //load dataset LineItems from nc1:"/home/yasser/Dropbox/Research/data/tpch_data/alldata/jsonformat/lineitem.json" pre-sorted;
 //load dataset Orders from nc1:"/home/yasser/Dropbox/Research/data/tpch_data/alldata/jsonformat/orders.json" pre-sorted;
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q1.aql b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q1.aql
index ec5e202..6288b4f 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q1.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q1.aql
@@ -10,7 +10,7 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_tpch_open_1.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q3.aql b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q3.aql
index 921cea6..955eba7 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q3.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q3.aql
@@ -18,11 +18,11 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_tpch_open_3.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q5.aql b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q5.aql
index 82c996f..ffbd71d 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q5.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q5.aql
@@ -30,17 +30,17 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
   
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_tpch_open_5.adm";
 
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q9.aql b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q9.aql
index 75b176d..8064099 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q9.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q9.aql
@@ -39,17 +39,17 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
   
 write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_tpch_closed_9.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-multipred.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-multipred.aql
index 9b83718..f235220 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-multipred.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-multipred.aql
@@ -37,8 +37,8 @@
   total: float
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
-create dataset Orders(OrderType) partitioned by key oid;
+create dataset Customers(CustomerType) primary key cid;
+create dataset Orders(OrderType) primary key oid;
 
 write output to nc1:"rttest/btree-index-join_primary-equi-join-multipred.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-neg_01.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-neg_01.aql
index 75c522b..8e8324f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-neg_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-neg_01.aql
@@ -15,8 +15,8 @@
           lname : string
 }
 
-create dataset test1.DsOne(TestType) partitioned by key key1;
-create dataset test1.DsTwo(TestType) partitioned by key key1;
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
 
 // Please note content enclosed in the comment in the predicate is the HINT to the optimizer
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-neg_02.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-neg_02.aql
index 4dd0d00..8a62332 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-neg_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-neg_02.aql
@@ -15,8 +15,8 @@
           lname : string
 }
 
-create dataset test1.DsOne(TestType) partitioned by key key1;
-create dataset test1.DsTwo(TestType) partitioned by key key1;
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
 
 // Please note content enclosed in the comment in the predicate is the HINT to the optimizer
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_01.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_01.aql
index a0877ba..6950747 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_01.aql
@@ -15,8 +15,8 @@
           lname : string
 }
 
-create dataset test1.DsOne(TestType) partitioned by key key1;
-create dataset test1.DsTwo(TestType) partitioned by key key1;
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
 
 // Please note content enclosed in the comment in the predicate is a HINT to the optimizer
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_02.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_02.aql
index 59db20b..b7832fd 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_02.aql
@@ -15,8 +15,8 @@
           lname : string
 }
 
-create dataset test1.DsOne(TestType) partitioned by key key1;
-create dataset test1.DsTwo(TestType) partitioned by key key1;
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
 
 // Please note content enclosed in the comment in the predicate is a HINT to the optimizer
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_03.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_03.aql
index d60a8ac..9268c2c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_03.aql
@@ -35,8 +35,8 @@
   total: float
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
-create dataset Orders(OrderType) partitioned by key oid;
+create dataset Customers(CustomerType) primary key cid;
+create dataset Orders(OrderType) primary key oid;
 
 write output to nc1:"rttest/btree-index-join_primary-equi-join_04.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_04.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_04.aql
index 56211e8..3c07154 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_04.aql
@@ -35,8 +35,8 @@
   total: float
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
-create dataset Orders(OrderType) partitioned by key oid;
+create dataset Customers(CustomerType) primary key cid;
+create dataset Orders(OrderType) primary key oid;
 
 write output to nc1:"rttest/btree-index-join_primary-equi-join_05.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_05.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_05.aql
index 0e6efe8..7da6b0c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_05.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_05.aql
@@ -26,7 +26,7 @@
   }
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
 write output to nc1:"rttest/btree-index-join_primary-equi-join_06.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-ge-join_01.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-ge-join_01.aql
index 559ede2..8bc306c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-ge-join_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-ge-join_01.aql
@@ -15,8 +15,8 @@
           lname : string
 }
 
-create dataset test1.DsOne(TestType) partitioned by key key1;
-create dataset test1.DsTwo(TestType) partitioned by key key1;
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
 
 // Please note content enclosed in the comment in the predicate is the HINT to the optimizer
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-ge-join_02.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-ge-join_02.aql
index 4948b6f..06be0e9 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-ge-join_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-ge-join_02.aql
@@ -15,8 +15,8 @@
           lname : string
 }
 
-create dataset test1.DsOne(TestType) partitioned by key key1;
-create dataset test1.DsTwo(TestType) partitioned by key key1;
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
 
 // Please note content enclosed in the comment in the predicate is the HINT to the optimizer
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-gt-join_01.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-gt-join_01.aql
index f48cfa3..67d75a1 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-gt-join_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-gt-join_01.aql
@@ -15,8 +15,8 @@
           lname : string
 }
 
-create dataset test1.DsOne(TestType) partitioned by key key1;
-create dataset test1.DsTwo(TestType) partitioned by key key1;
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
 
 // Please note content enclosed in the comment in the predicate is the HINT to the optimizer
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-gt-join_02.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-gt-join_02.aql
index b2f67b3..a7be083 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-gt-join_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-gt-join_02.aql
@@ -15,8 +15,8 @@
           lname : string
 }
 
-create dataset test1.DsOne(TestType) partitioned by key key1;
-create dataset test1.DsTwo(TestType) partitioned by key key1;
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
 
 // Please note content enclosed in the comment in the predicate is the HINT to the optimizer
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-le-join_01.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-le-join_01.aql
index ef4dc05..66f2ace 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-le-join_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-le-join_01.aql
@@ -15,8 +15,8 @@
           lname : string
 }
 
-create dataset test1.DsOne(TestType) partitioned by key key1;
-create dataset test1.DsTwo(TestType) partitioned by key key1;
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
 
 // Please note content enclosed in the comment in the predicate is the HINT to the optimizer
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-le-join_02.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-le-join_02.aql
index 4c62ebb..39c1d51 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-le-join_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-le-join_02.aql
@@ -15,8 +15,8 @@
           lname : string
 }
 
-create dataset test1.DsOne(TestType) partitioned by key key1;
-create dataset test1.DsTwo(TestType) partitioned by key key1;
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
 
 // Please note content enclosed in the comment in the predicate is the HINT to the optimizer
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-lt-join_01.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-lt-join_01.aql
index 79e24cb..ef6a616 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-lt-join_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-lt-join_01.aql
@@ -15,8 +15,8 @@
           lname : string
 }
 
-create dataset test1.DsOne(TestType) partitioned by key key1;
-create dataset test1.DsTwo(TestType) partitioned by key key1;
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
 
 // Please note content enclosed in the comment in the predicate is the HINT to the optimizer
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-lt-join_02.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-lt-join_02.aql
index 9709acd..4924514 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-lt-join_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-lt-join_02.aql
@@ -15,8 +15,8 @@
           lname : string
 }
 
-create dataset test1.DsOne(TestType) partitioned by key key1;
-create dataset test1.DsTwo(TestType) partitioned by key key1;
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
 
 // Please note content enclosed in the comment in the predicate is the HINT to the optimizer
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join-multiindex.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join-multiindex.aql
index 44cdef8..c596984 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join-multiindex.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join-multiindex.aql
@@ -39,10 +39,10 @@
 } 
 
 create dataset FacebookUsers(FacebookUserType)
-partitioned by key id;
+primary key id;
 
 create dataset FacebookMessages(FacebookMessageType)
-partitioned by key message-id;
+primary key message-id;
 
 create index fbmIdxAutId if not exists on FacebookMessages(author-id-copy);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join-multipred.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join-multipred.aql
index 03cbf24..bb803b0 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join-multipred.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join-multipred.aql
@@ -27,9 +27,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index title_index on DBLP(title);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_01.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_01.aql
index e07eee5..8a4f056 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_01.aql
@@ -25,9 +25,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index title_index on DBLP(title);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_02.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_02.aql
index 2546244..fa68cd2 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_02.aql
@@ -25,9 +25,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index title_index on CSX(title);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_03.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_03.aql
index 251c88e..3eedafd 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_03.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index title_index on DBLP(title);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-01.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-01.aql
index d3d8d69..0d096a7 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-01.aql
@@ -19,7 +19,7 @@
 }
 
 // create internal dataset with primary index (composite key) defined on fname,lname fields
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.fname > "Roger"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-02.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-02.aql
index eeded38..fbcdd50 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-02.aql
@@ -19,7 +19,7 @@
 }
 
 // create internal dataset with primary index defined on fname,lname fields
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.fname >= "Susan"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-03.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-03.aql
index 1046edd..2047b62 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-03.aql
@@ -19,7 +19,7 @@
 }
 
 // create internal dataset with primary index defined on fname,lname fields
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.fname < "Isa"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-04.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-04.aql
index 2bee1fe..84147ae 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-04.aql
@@ -19,7 +19,7 @@
 }
 
 // create internal dataset with primary index defined on fname,lname fields
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.fname <= "Vanpatten"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-05.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-05.aql
index 0423290..0fbf179 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-05.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-05.aql
@@ -19,7 +19,7 @@
 }
 
 // create internal dataset with primary index defined on fname,lname fields
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.fname != "Max"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-06.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-06.aql
index 313a4fa..08164fd 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-06.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-06.aql
@@ -19,7 +19,7 @@
 }
 
 // create internal dataset with primary index defined on fname,lname fields
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.fname = "Julio"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-07.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-07.aql
index e9063eb..2b635a5 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-07.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-07.aql
@@ -20,7 +20,7 @@
 }
 
 // create internal dataset with primary index defined on fname,lname fields
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.lname = "Kim"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-08.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-08.aql
index 9919457..337072e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-08.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-08.aql
@@ -15,7 +15,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.fname = "Young Seok" and $emp.lname = "Kim"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-09.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-09.aql
index 8973d2d..cc52d2a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-09.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-09.aql
@@ -19,7 +19,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.fname = "Julio" or $emp.lname = "Malaika"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-10.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-10.aql
index 5076e97..4d0f85e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-10.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-10.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.fname > "Alex" and $emp.lname < "Zach"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-11.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-11.aql
index feeaaad..164e49a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-11.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-11.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.fname > "Allan" and $emp.lname < "Zubi"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-12.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-12.aql
index 740cca6..659d1ff 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-12.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-12.aql
@@ -19,7 +19,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.fname > "Allan" and $emp.lname = "Xu"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-13.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-13.aql
index 47e81b2..3c49200 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-13.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-13.aql
@@ -19,7 +19,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.fname = "Julio" and $emp.lname < "Xu"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-14.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-14.aql
index 0ec31aa..e93061f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-14.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-14.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.fname >= "Michael" and $emp.lname <= "Xu"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-15.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-15.aql
index f595804..97a486a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-15.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-15.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.fname > "Craig" and $emp.lname > "Kevin" and $emp.fname < "Mary" and $emp.lname < "Tomes"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-16.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-16.aql
index 022bd50..cf80ed2 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-16.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-16.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.fname >= "Craig" and $emp.lname >= "Kevin" and $emp.fname <= "Mary" and $emp.lname <= "Tomes"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-17.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-17.aql
index ad114cd..db9c994 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-17.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-17.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.fname <= "Craig" and $emp.lname > "Kevin"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-18.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-18.aql
index 43b0bd3..9787005 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-18.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-18.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.fname != "Michael" and $emp.lname != "Carey"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-19.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-19.aql
index fe2dc41..0dff6af 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-19.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-19.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.fname > "Craig" and $emp.lname > "Kevin" and $emp.fname <= "Mary" and $emp.lname <= "Tomes"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-20.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-20.aql
index c817492..ac9e374 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-20.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-20.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
 
 for $emp in dataset('testdst') 
 where $emp.fname >= "Craig" and $emp.lname >= "Kevin" and $emp.fname < "Mary" and $emp.lname < "Tomes"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-21.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-21.aql
index 46c107c..afa241f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-21.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-21.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key fname;
+create dataset testdst(TestType) primary key fname;
 
 for $emp in dataset('testdst') 
 where $emp.fname > "Max" 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-22.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-22.aql
index 0f74980..4af86a0 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-22.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-22.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key fname;
+create dataset testdst(TestType) primary key fname;
 
 for $emp in dataset('testdst') 
 where $emp.fname >= "Sofia" 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-23.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-23.aql
index c942165..f73bd1f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-23.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-23.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key fname;
+create dataset testdst(TestType) primary key fname;
 
 for $emp in dataset('testdst') 
 where $emp.fname < "Chen" 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-24.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-24.aql
index 0c66008..1faa37e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-24.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-24.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key fname;
+create dataset testdst(TestType) primary key fname;
 
 for $emp in dataset('testdst') 
 where $emp.fname <= "Julio" 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-25.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-25.aql
index 770baf5..6b24b7c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-25.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-25.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key fname;
+create dataset testdst(TestType) primary key fname;
 
 for $emp in dataset('testdst') 
 where $emp.fname > "Neil" and $emp.fname < "Roger" 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-26.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-26.aql
index ba5de37..05a69d2 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-26.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-26.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key fname;
+create dataset testdst(TestType) primary key fname;
 
 for $emp in dataset('testdst') 
 where $emp.fname >= "Max" and $emp.fname <= "Roger" 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-31.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-31.aql
index 5117da4..c2e6257 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-31.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-31.aql
@@ -20,7 +20,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-32.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-32.aql
index 37b46b3..d009e2e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-32.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-32.aql
@@ -20,7 +20,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-33.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-33.aql
index 8f1b4d5..2b364e4 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-33.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-33.aql
@@ -20,7 +20,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-34.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-34.aql
index 5042bd9..f20b5a9 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-34.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-34.aql
@@ -20,7 +20,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-35.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-35.aql
index 2e38330..dfc5017 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-35.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-35.aql
@@ -20,7 +20,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-36.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-36.aql
index 84bdcd49..7d72623 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-36.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-36.aql
@@ -20,7 +20,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-37.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-37.aql
index 73133c2..7c155ce 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-37.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-37.aql
@@ -20,7 +20,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-38.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-38.aql
index 3f844ee..dc63f05 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-38.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-38.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-39.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-39.aql
index 3813e48..a9d32c3 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-39.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-39.aql
@@ -20,7 +20,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-40.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-40.aql
index 4e13597..25c6dec 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-40.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-40.aql
@@ -18,7 +18,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-41.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-41.aql
index c724cbe..bc65009 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-41.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-41.aql
@@ -18,7 +18,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-42.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-42.aql
index 06c93b0..289d1b4 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-42.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-42.aql
@@ -20,7 +20,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-43.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-43.aql
index 490becb..86e5d67 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-43.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-43.aql
@@ -20,7 +20,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-44.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-44.aql
index f75f3de..54e149e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-44.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-44.aql
@@ -18,7 +18,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-45.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-45.aql
index 6717780..cc33e5a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-45.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-45.aql
@@ -18,7 +18,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-46.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-46.aql
index 8a508b3..f486e44 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-46.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-46.aql
@@ -18,7 +18,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-47.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-47.aql
index 70e8185..5d06a51 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-47.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-47.aql
@@ -18,7 +18,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-48.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-48.aql
index bb6f24d..2dc1ab4 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-48.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-48.aql
@@ -18,7 +18,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-49.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-49.aql
index 6d11235..6175ba6 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-49.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-49.aql
@@ -19,7 +19,7 @@
 }
 
 // create internal dataset
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-50.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-50.aql
index 3a91c09..04aefc3 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-50.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-50.aql
@@ -18,7 +18,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-51.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-51.aql
index 3be3875..5ae63a9 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-51.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-51.aql
@@ -18,7 +18,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname,lname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-52.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-52.aql
index 7b52ec9..e720742 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-52.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-52.aql
@@ -17,7 +17,7 @@
     lname : string
 }
  
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-53.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-53.aql
index 45536b5..65a9b6d 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-53.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-53.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-54.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-54.aql
index f1a3cb5..a18975a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-54.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-54.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-55.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-55.aql
index bbeca00..94c1463 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-55.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-55.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-56.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-56.aql
index 7ecfea9..53f8ca4 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-56.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-56.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-57.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-57.aql
index 2183f69..43cf54d 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-57.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-57.aql
@@ -17,7 +17,7 @@
     lname : string
 }
 
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-58.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-58.aql
index bdf16b6..aa18136 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-58.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-58.aql
@@ -17,7 +17,7 @@
     lname : string
 }
  
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
 
 create index sec_Idx on testdst(fname);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/cell-aggregation-with-filtering.aql b/asterix-app/src/test/resources/optimizerts/queries/cell-aggregation-with-filtering.aql
index a328a1b..c883645 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/cell-aggregation-with-filtering.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/cell-aggregation-with-filtering.aql
@@ -13,7 +13,7 @@
 create nodegroup group1 if exists on nc1, nc2;
 
 create dataset TwitterData(Tweet)
-  partitioned by key id on group1;
+  primary key id on group1;
 
 
 load dataset TwitterData 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/consolidate-selects-complex.aql b/asterix-app/src/test/resources/optimizerts/queries/consolidate-selects-complex.aql
index 313e9a4..7dfe817 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/consolidate-selects-complex.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/consolidate-selects-complex.aql
@@ -13,7 +13,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/consolidate-selects-simple.aql b/asterix-app/src/test/resources/optimizerts/queries/consolidate-selects-simple.aql
index 8a237d2..d21fa6e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/consolidate-selects-simple.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/consolidate-selects-simple.aql
@@ -16,7 +16,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 
 write output to nc1:"rttest/consolidate-selects-simple.aql";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/cust_group_no_agg.aql b/asterix-app/src/test/resources/optimizerts/queries/cust_group_no_agg.aql
index 6fdae88..6a86b8a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/cust_group_no_agg.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/cust_group_no_agg.aql
@@ -18,7 +18,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 
 write output to nc1:"/tmp/.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/denorm-cust-order.aql b/asterix-app/src/test/resources/optimizerts/queries/denorm-cust-order.aql
index acad4a1..389dad5 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/denorm-cust-order.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/denorm-cust-order.aql
@@ -36,9 +36,9 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 create dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 write output to nc1:"/tmp/custorder.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/distinct_aggregate.aql b/asterix-app/src/test/resources/optimizerts/queries/distinct_aggregate.aql
index a4784c1..6fc1dac 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/distinct_aggregate.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/distinct_aggregate.aql
@@ -24,7 +24,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset LineItems_q1(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 
 write output to nc1:"rttest/tpch_q1_pricing_summary_report_nt.adm";
  
diff --git a/asterix-app/src/test/resources/optimizerts/queries/fj-dblp-csx.aql b/asterix-app/src/test/resources/optimizerts/queries/fj-dblp-csx.aql
index 2898058..f748eae 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/fj-dblp-csx.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/fj-dblp-csx.aql
@@ -22,8 +22,8 @@
 
 create nodegroup group1 if not exists on nc1, nc2;
 
-create dataset DBLP(DBLPType) partitioned by key id on group1;
-create dataset CSX(CSXType) partitioned by key id on group1;
+create dataset DBLP(DBLPType) primary key id on group1;
+create dataset CSX(CSXType) primary key id on group1;
 
 write output to nc1:'rttest/fj-dblp-csx.adm';
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/fj-phase1.aql b/asterix-app/src/test/resources/optimizerts/queries/fj-phase1.aql
index 778232a..19ceda0 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/fj-phase1.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/fj-phase1.aql
@@ -19,10 +19,10 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Users(UserType) 
-  partitioned by key uid on group1;
+  primary key uid on group1;
 
 create dataset Visitors(VisitorType) 
-  partitioned by key vid on group1;
+  primary key vid on group1;
 
 
 // set simfunction "jaccard";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/fj-phase2-with-hints.aql b/asterix-app/src/test/resources/optimizerts/queries/fj-phase2-with-hints.aql
index 866c027..a0973f4 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/fj-phase2-with-hints.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/fj-phase2-with-hints.aql
@@ -15,7 +15,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset DBLP_fuzzyjoin_078(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 write output to nc1:'rttest/fuzzyjoin_078.adm';
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inlined_q18_large_volume_customer.aql b/asterix-app/src/test/resources/optimizerts/queries/inlined_q18_large_volume_customer.aql
index 49c4157..0db7a51 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inlined_q18_large_volume_customer.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inlined_q18_large_volume_customer.aql
@@ -49,11 +49,11 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 create dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 
 write output to nc1:"/tmp/inlined_q18_large_volume_customer.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/introhashpartitionmerge.aql b/asterix-app/src/test/resources/optimizerts/queries/introhashpartitionmerge.aql
index 9427ab5..cd1d78c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/introhashpartitionmerge.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/introhashpartitionmerge.aql
@@ -9,7 +9,7 @@
 
 create nodegroup group1 if not exists on nc1, nc2;
 
-create dataset TOKENSRANKEDADM(TOKENSRANKEDADMType) partitioned by key rank on group1;
+create dataset TOKENSRANKEDADM(TOKENSRANKEDADMType) primary key rank on group1;
 
 write output to nc1:'rttest/introhashpartitionmerge.adm';
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-contains-panic.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-contains-panic.aql
index 31c0d03..d193238 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-contains-panic.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-contains-panic.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-contains.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-contains.aql
index 9851cd8..137b1c7 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-contains.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-contains.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-check-panic.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-check-panic.aql
index 4608aeb..6f7f38c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-check-panic.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-check-panic.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-check.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-check.aql
index 7b264b7..0d9db19 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-check.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-check.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-panic.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-panic.aql
index 9412cf3..7decdb5 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-panic.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-panic.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance.aql
index d6be7d4..c7a9ba4 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-fuzzyeq-edit-distance.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-fuzzyeq-edit-distance.aql
index b241311..3a3d7bb 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-fuzzyeq-edit-distance.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-fuzzyeq-edit-distance.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-fuzzyeq-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-fuzzyeq-jaccard.aql
index 33f9d5a..5e9ac0e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-fuzzyeq-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-fuzzyeq-jaccard.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-jaccard-check.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-jaccard-check.aql
index 376415f..dbb9c71 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-jaccard-check.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-jaccard-check.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-jaccard.aql
index ef7143a..f25fb8b 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-jaccard.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-check-panic.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-check-panic.aql
index 8209572..c105eab 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-check-panic.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-check-panic.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-check.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-check.aql
index b55a516..b12037d1 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-check.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-check.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-panic.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-panic.aql
index 63bead1..84bcbb9 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-panic.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-panic.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance.aql
index 7958fb4..afafa37 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-fuzzyeq-edit-distance.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-fuzzyeq-edit-distance.aql
index f156bd2..86b6474 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-fuzzyeq-edit-distance.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-fuzzyeq-edit-distance.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-fuzzyeq-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-fuzzyeq-jaccard.aql
index 6632189..64812db 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-fuzzyeq-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-fuzzyeq-jaccard.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-jaccard-check.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-jaccard-check.aql
index 61dcc7f..4f8b8fe 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-jaccard-check.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-jaccard-check.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-jaccard.aql
index 7af2161..62d6806 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-jaccard.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-fuzzyeq-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-fuzzyeq-jaccard.aql
index d65a517..4ac02c8 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-fuzzyeq-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-fuzzyeq-jaccard.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-jaccard-check.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-jaccard-check.aql
index 9551294..80e8aef 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-jaccard-check.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-jaccard-check.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-jaccard.aql
index 6580bd0..588ca14 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-jaccard.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-contains.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-contains.aql
index eee681a..8d9cb02 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-contains.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-contains.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index keyword_index on DBLP(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-fuzzyeq-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-fuzzyeq-jaccard.aql
index fb6d0e8..006398e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-fuzzyeq-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-fuzzyeq-jaccard.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index keyword_index on DBLP(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-jaccard-check.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-jaccard-check.aql
index 10ca5ce..4c814e4 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-jaccard-check.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-jaccard-check.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index keyword_index on DBLP(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-jaccard.aql
index 7f544b6..53a5bbb 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-jaccard.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index keyword_index on DBLP(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_01.aql
index 4d6bfd9..1c96adc 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_01.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_02.aql
index a4d4163..e80443e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_02.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic.aql
index d6f58fc..d0d3035 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let.aql
index 24d3202..2d2f34f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-jaccard-check-let.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-jaccard-check-let.aql
index de57848..fb59040 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-jaccard-check-let.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-jaccard-check-let.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-jaccard-check-multi-let.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-jaccard-check-multi-let.aql
index 2eed598..b86c6b2 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-jaccard-check-multi-let.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-jaccard-check-multi-let.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-edit-distance-check-let-panic.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-edit-distance-check-let-panic.aql
index 337eced..aa9bc59 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-edit-distance-check-let-panic.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-edit-distance-check-let-panic.aql
@@ -24,7 +24,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-edit-distance-check-let.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-edit-distance-check-let.aql
index c3be0bd..8a78450 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-edit-distance-check-let.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-edit-distance-check-let.aql
@@ -24,7 +24,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-jaccard-check-let.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-jaccard-check-let.aql
index 0652131..48ec727 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-jaccard-check-let.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-jaccard-check-let.aql
@@ -24,7 +24,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ulist-jaccard-check-let.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ulist-jaccard-check-let.aql
index 5a3b329..0757d41 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ulist-jaccard-check-let.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ulist-jaccard-check-let.aql
@@ -24,7 +24,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/word-jaccard-check-let.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/word-jaccard-check-let.aql
index 2c45ea8..ec97668 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/word-jaccard-check-let.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/word-jaccard-check-let.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index keyword_index on DBLP(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/word-jaccard-check-multi-let.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/word-jaccard-check-multi-let.aql
index 8f8f014..bf1821e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/word-jaccard-check-multi-let.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/word-jaccard-check-multi-let.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index keyword_index on DBLP(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-edit-distance-inline.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-edit-distance-inline.aql
index 78a285a..ee768d7 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-edit-distance-inline.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-edit-distance-inline.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-edit-distance.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-edit-distance.aql
index b05c0d1..ca2827d 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-edit-distance.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-edit-distance.aql
@@ -25,9 +25,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index ngram_index on DBLP(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-fuzzyeq-edit-distance.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-fuzzyeq-edit-distance.aql
index 87ebe4a..0e15708 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-fuzzyeq-edit-distance.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-fuzzyeq-edit-distance.aql
@@ -25,9 +25,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index ngram_index on CSX(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-fuzzyeq-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-fuzzyeq-jaccard.aql
index 77aa691..1a97936 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-fuzzyeq-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-fuzzyeq-jaccard.aql
@@ -25,9 +25,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index ngram_index on DBLP(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-jaccard-inline.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-jaccard-inline.aql
index 9f6e66c..bf6bb82 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-jaccard-inline.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-jaccard.aql
index 1979201..dae2a78 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-jaccard.aql
@@ -25,9 +25,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index ngram_index on DBLP(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-edit-distance-inline.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-edit-distance-inline.aql
index 25cdc53..21114fc 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-edit-distance-inline.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-edit-distance-inline.aql
@@ -25,7 +25,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-edit-distance.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-edit-distance.aql
index e5dec4f..b90b4f7 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-edit-distance.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-edit-distance.aql
@@ -24,9 +24,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-fuzzyeq-edit-distance.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-fuzzyeq-edit-distance.aql
index ae24f13..c78f65c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-fuzzyeq-edit-distance.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-fuzzyeq-edit-distance.aql
@@ -24,9 +24,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-fuzzyeq-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-fuzzyeq-jaccard.aql
index f7df554..3bbcc28 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-fuzzyeq-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-fuzzyeq-jaccard.aql
@@ -24,9 +24,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-jaccard-inline.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-jaccard-inline.aql
index 61816b8..7dca72a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-jaccard-inline.aql
@@ -25,7 +25,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-jaccard.aql
index d16776a..79d8baf 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-jaccard.aql
@@ -24,9 +24,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-fuzzyeq-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-fuzzyeq-jaccard.aql
index 0d4872b..0efb32d 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-fuzzyeq-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-fuzzyeq-jaccard.aql
@@ -24,9 +24,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-jaccard-inline.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-jaccard-inline.aql
index c89fdc3..2bca24d 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-jaccard-inline.aql
@@ -25,7 +25,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-jaccard.aql
index 72d0341..8805542 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-jaccard.aql
@@ -24,9 +24,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-fuzzyeq-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-fuzzyeq-jaccard.aql
index 99a3c79..2d2e0cc 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-fuzzyeq-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-fuzzyeq-jaccard.aql
@@ -25,9 +25,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index keyword_index on DBLP(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-jaccard-inline.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-jaccard-inline.aql
index 24c66ac..0d5bf9c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-jaccard-inline.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index keyword_index on DBLP(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-jaccard.aql
index ef91b89..5ffa92e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-jaccard.aql
@@ -25,9 +25,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index keyword_index on DBLP(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_01.aql
index f9cc7e9..d24ee17 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_01.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index ngram_index on DBLP(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_02.aql
index 011afea..ec553a0 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_02.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index ngram_index on CSX(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_03.aql
index 649c3b6..4de1680 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_03.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_04.aql
index e6a3c56..091676f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_04.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_01.aql
index a94e84f..8a0282c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_01.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index ngram_index on DBLP(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_02.aql
index 61edf5c..5d026db 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_02.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index ngram_index on CSX(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_03.aql
index 6c61de1..11b39c5 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_03.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_04.aql
index 5925f47..b8952e9 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_04.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_01.aql
index f6dd893..4704213 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_01.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index ngram_index on CSX(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_02.aql
index d9822aa..8454be9 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_02.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index ngram_index on DBLP(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_03.aql
index 0431a6c..4187d8f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_03.aql
@@ -15,7 +15,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(authors) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_01.aql
index 933e5d0..84fb8f3 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_01.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index ngram_index on DBLP(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_02.aql
index 1213935..d0c3c6f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_02.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index ngram_index on CSX(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_03.aql
index 70c7640..a725edc 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_03.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_01.aql
index 4d7939a..c25e2e3 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_01.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index ngram_index on DBLP(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_02.aql
index e0acebb..0e9f4d5 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_02.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index ngram_index on CSX(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_03.aql
index 808965d..bbd0f51 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_03.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_04.aql
index e66cdc5..956f712 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_04.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_01.aql
index a28a6af..742cb69 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_01.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index ngram_index on DBLP(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_02.aql
index a4ff58d..a620399 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_02.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index ngram_index on CSX(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_03.aql
index 1a90cf9..d12bf98 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_03.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_04.aql
index 7be6773..6b308c8 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_04.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index ngram_index on DBLP(title) type ngram(3);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_01.aql
index 84d2f45..1afe025 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_01.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_02.aql
index b0ec650..aec4c85 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_02.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers2(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_03.aql
index 03a1a3f..47cea17 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_03.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_04.aql
index e7814af..9abc4e8 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_04.aql
@@ -24,7 +24,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_01.aql
index ac25ac2..b8f6144 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_01.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_02.aql
index a160c4d..7b17749 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_02.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers2(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_03.aql
index dba92b9..7132950 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_03.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_04.aql
index 4835727..84c7d5b 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_04.aql
@@ -24,7 +24,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_01.aql
index 1dddf5e..c84bd28 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_01.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_02.aql
index 15b1001..c6b9d9c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_02.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers2(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_03.aql
index af4814f..962f05c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_03.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_01.aql
index 2b46707..4b3793a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_01.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_02.aql
index 4f188dd..60cdb07 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_02.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers2(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_03.aql
index 05cf635..2df41b5 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_03.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_01.aql
index 60fc705..8968ec6 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_01.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_02.aql
index 9cdb78b..96b8622 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_02.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers2(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_03.aql
index b1c4d68..e8763b4 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_03.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_04.aql
index a1e1478..3194e47 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_04.aql
@@ -24,7 +24,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_01.aql
index cabdac1..1c4c7c2 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_01.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_02.aql
index 49a7411..212b64b 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_02.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers2(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_03.aql
index 639d782..22b4108 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_03.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_04.aql
index c3e34b0..32e8a6c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_04.aql
@@ -24,7 +24,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_01.aql
index c19ce47..cd391a4 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_01.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_02.aql
index 6e3a274..a7d7118 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_02.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers2(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_03.aql
index 396af09..a5ec440 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_03.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_01.aql
index 8792dd2..dead8cc 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_01.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_02.aql
index 185df97..b18abf8 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_02.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers2(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_03.aql
index 4d2b2e7..5afd428 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_03.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_04.aql
index acf4d71..3c8bdd7 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_04.aql
@@ -24,7 +24,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_01.aql
index 0d5ad31..2001a2a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_01.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_02.aql
index 8ac631e..95ce2ad 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_02.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 create index interests_index on Customers2(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_03.aql
index 94aedbb..e4fc27f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_03.aql
@@ -23,7 +23,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_04.aql
index e30fbcf..6356f79 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_04.aql
@@ -24,7 +24,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
 create index interests_index on Customers(interests) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_01.aql
index 8438509..ff39969 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_01.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index keyword_index on DBLP(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_02.aql
index 6f6c5f6..f693855 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_02.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index keyword_index on CSX(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_03.aql
index 3f3a247..72f9bdc 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_03.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index keyword_index on DBLP(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_01.aql
index 14f63bf..2a076bc 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_01.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index keyword_index on DBLP(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_02.aql
index 46932be..98b3972 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_02.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index keyword_index on CSX(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_03.aql
index 77ac9ba..dace174 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_03.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index keyword_index on DBLP(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_04.aql
index 6129c52..fa68be2 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_04.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index keyword_index on DBLP(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_01.aql
index 47d1192..df0b75e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_01.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index keyword_index on DBLP(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_02.aql
index ac33cb0..1f227c8 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_02.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 create index keyword_index on CSX(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_03.aql
index 128e500..7a719ff 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_03.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index keyword_index on DBLP(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_04.aql
index 325f955..077803b 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_04.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 create index keyword_index on DBLP(title) type keyword;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/join-super-key_01.aql b/asterix-app/src/test/resources/optimizerts/queries/join-super-key_01.aql
index f0251ad..52b35b8 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/join-super-key_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/join-super-key_01.aql
@@ -66,9 +66,9 @@
 write output to nc1:"/tmp/join-super-key_01.adm";
 
 create dataset LineItems(LineItemType)
-  partitioned by key l_partkey, l_linenumber on group1;
+  primary key l_partkey, l_linenumber on group1;
 create dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 
 for $li in dataset('LineItems')
diff --git a/asterix-app/src/test/resources/optimizerts/queries/join-super-key_02.aql b/asterix-app/src/test/resources/optimizerts/queries/join-super-key_02.aql
index 0904f37..98f7e9e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/join-super-key_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/join-super-key_02.aql
@@ -66,9 +66,9 @@
 write output to nc1:"/tmp/join-super-key_01.adm";
 
 create dataset LineItems(LineItemType)
-  partitioned by key l_partkey, l_linenumber on group1;
+  primary key l_partkey, l_linenumber on group1;
 create dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 
 for $ps in dataset('PartSupp')
diff --git a/asterix-app/src/test/resources/optimizerts/queries/loj-super-key_01.aql b/asterix-app/src/test/resources/optimizerts/queries/loj-super-key_01.aql
index 1f43f73..4855576 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/loj-super-key_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/loj-super-key_01.aql
@@ -66,9 +66,9 @@
 write output to nc1:"/tmp/loj-super-key_01.adm";
 
 create dataset LineItems(LineItemType)
-  partitioned by key l_partkey, l_linenumber on group1;
+  primary key l_partkey, l_linenumber on group1;
 create dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 
 for $li in dataset('LineItems')
diff --git a/asterix-app/src/test/resources/optimizerts/queries/loj-super-key_02.aql b/asterix-app/src/test/resources/optimizerts/queries/loj-super-key_02.aql
index ceab193..ce937c5 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/loj-super-key_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/loj-super-key_02.aql
@@ -66,9 +66,9 @@
 write output to nc1:"/tmp/loj-super-key_01.adm";
 
 create dataset LineItems(LineItemType)
-  partitioned by key l_partkey, l_linenumber on group1;
+  primary key l_partkey, l_linenumber on group1;
 create dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 
 for $ps in dataset('PartSupp')
diff --git a/asterix-app/src/test/resources/optimizerts/queries/nested_loj2.aql b/asterix-app/src/test/resources/optimizerts/queries/nested_loj2.aql
index 0b77f55..d235b09 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/nested_loj2.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/nested_loj2.aql
@@ -47,11 +47,11 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 create dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 
 write output to nc1:"/tmp/nested_loj.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/nested_loj3.aql b/asterix-app/src/test/resources/optimizerts/queries/nested_loj3.aql
index 3a1ef04..1616a67 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/nested_loj3.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/nested_loj3.aql
@@ -57,13 +57,13 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 create dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 create dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 write output to nc1:"/tmp/nested_loj.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/noncollocated.aql b/asterix-app/src/test/resources/optimizerts/queries/noncollocated.aql
index 3aa1855..26b5ab0 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/noncollocated.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/noncollocated.aql
@@ -21,10 +21,10 @@
 create nodegroup group2 if not exists on nc2;
 
 create dataset Users(UserType) 
-  partitioned by key uid on group1;
+  primary key uid on group1;
 
 create dataset Visitors(VisitorType) 
-  partitioned by key vid on group2;
+  primary key vid on group2;
 
 
 write output to nc1:"/tmp/fuzzy1.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/orderby-desc-using-gby.aql b/asterix-app/src/test/resources/optimizerts/queries/orderby-desc-using-gby.aql
index 682800c..8cef009 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/orderby-desc-using-gby.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/orderby-desc-using-gby.aql
@@ -24,7 +24,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
   
 write output to nc1:"rttest/gby-using-orderby-desc.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/orders-aggreg.aql b/asterix-app/src/test/resources/optimizerts/queries/orders-aggreg.aql
index c2f96f6..960a8e6 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/orders-aggreg.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/orders-aggreg.aql
@@ -16,7 +16,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Orders(OrderType)
-  partitioned by key oid on group1;
+  primary key oid on group1;
 
 write output to nc1:"/tmp/orders-aggreg.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/orders-composite-index-search.aql b/asterix-app/src/test/resources/optimizerts/queries/orders-composite-index-search.aql
index d82be3c..73ec062 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/orders-composite-index-search.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/orders-composite-index-search.aql
@@ -14,7 +14,7 @@
   o_comment: string
 }
 
-create dataset Orders(OrderType) partitioned by key o_orderkey;
+create dataset Orders(OrderType) primary key o_orderkey;
 
 create index idx_Custkey_Orderstatus on Orders(o_custkey, o_orderstatus);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive-open_01.aql b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive-open_01.aql
index 150e962..b5aa50d 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive-open_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive-open_01.aql
@@ -19,7 +19,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 
 load dataset Orders 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive-open_02.aql b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive-open_02.aql
index 725da3a..daef719 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive-open_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive-open_02.aql
@@ -19,7 +19,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 
 load dataset Orders 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive_01.aql b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive_01.aql
index 0a42fa6..21a7f5a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive_01.aql
@@ -19,7 +19,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 
 load dataset Orders 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive_02.aql b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive_02.aql
index fed390c..25a6d3f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive_02.aql
@@ -19,7 +19,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 
 load dataset Orders 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-open.aql b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-open.aql
index a6bfa42..31d41b6 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-open.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-open.aql
@@ -20,7 +20,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 
 create index idx_Orders_Custkey on Orders(o_custkey);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search.aql b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search.aql
index 75be90b..4e01059 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search.aql
@@ -20,7 +20,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 
 create index idx_Orders_Custkey on Orders(o_custkey);
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/prim-idx-search-open.aql b/asterix-app/src/test/resources/optimizerts/queries/prim-idx-search-open.aql
index 09a1ef6..1f6f887 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/prim-idx-search-open.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/prim-idx-search-open.aql
@@ -21,7 +21,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 
 
 write output to nc1:"/tmp/prim_index_search.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/prim-idx-search.aql b/asterix-app/src/test/resources/optimizerts/queries/prim-idx-search.aql
index 7a22597..7939fed 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/prim-idx-search.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/prim-idx-search.aql
@@ -21,7 +21,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 
 
 write output to nc1:"/tmp/prim_index_search.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/pull_select_above_eq_join.aql b/asterix-app/src/test/resources/optimizerts/queries/pull_select_above_eq_join.aql
index dfc05ff..ee998cf 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/pull_select_above_eq_join.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/pull_select_above_eq_join.aql
@@ -21,10 +21,10 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Users(UserType) 
-  partitioned by key uid on group1;
+  primary key uid on group1;
 
 create dataset Visitors(VisitorType) 
-  partitioned by key vid on group1;
+  primary key vid on group1;
 
 
 write output to nc1:"/tmp/pull-select-above-eq-join.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/push-project-through-group.aql b/asterix-app/src/test/resources/optimizerts/queries/push-project-through-group.aql
index 5dd229b..3c0235c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/push-project-through-group.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/push-project-through-group.aql
@@ -15,7 +15,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 
 write output to nc1:'rttest/fuzzyjoin_080.adm';
diff --git a/asterix-app/src/test/resources/optimizerts/queries/push_limit.aql b/asterix-app/src/test/resources/optimizerts/queries/push_limit.aql
index a9c4fe7..0ffe2b0 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/push_limit.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/push_limit.aql
@@ -16,7 +16,7 @@
 
 create nodegroup group1 if not exists on nc1, nc2;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 
 write output to nc1:"/tmp/push_limit.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/q1.aql b/asterix-app/src/test/resources/optimizerts/queries/q1.aql
index 8f62292..ced342d 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/q1.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/q1.aql
@@ -27,7 +27,7 @@
 
 create nodegroup group1 if not exists on nc1, nc2;
 
-create dataset User(UserType) partitioned by key name on group1;
+create dataset User(UserType) primary key name on group1;
 
 write output to nc1:"/tmp/q1.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/q1_pricing_summary_report_nt.aql b/asterix-app/src/test/resources/optimizerts/queries/q1_pricing_summary_report_nt.aql
new file mode 100644
index 0000000..b6de1fa
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/q1_pricing_summary_report_nt.aql
@@ -0,0 +1,50 @@
+drop dataverse tpch if exists;
+create dataverse tpch;
+use dataverse tpch;
+
+create type LineItemType as closed {
+  l_orderkey: int32, 
+  l_partkey: int32, 
+  l_suppkey: int32, 
+  l_linenumber: int32, 
+  l_quantity: double, 
+  l_extendedprice: double,
+  l_discount: double, 
+  l_tax: double,
+  l_returnflag: string, 
+  l_linestatus: string, 
+  l_shipdate: string,
+  l_commitdate: string, 
+  l_receiptdate: string, 
+  l_shipinstruct: string, 
+  l_shipmode: string, 
+  l_comment: string
+}
+
+create dataset LineItem(LineItemType)
+  primary key l_orderkey, l_linenumber;
+
+load dataset LineItem 
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+write output to nc1:"rttest/tpch_q1_pricing_summary_report_nt.adm";
+ 
+for $l in dataset('LineItem')
+where $l.l_shipdate <= '1998-09-02'
+/*+ hash*/
+group by $l_returnflag := $l.l_returnflag, $l_linestatus := $l.l_linestatus  
+  with $l
+order by $l_returnflag, $l_linestatus
+return {
+  "l_returnflag": $l_returnflag,
+  "l_linestatus": $l_linestatus,
+  "sum_qty": sum(for $i in $l return $i.l_quantity),
+  "sum_base_price": sum(for $i in $l return $i.l_extendedprice),
+  "sum_disc_price": sum(for $i in $l return $i.l_extendedprice * (1 - $i.l_discount)),
+  "sum_charge": sum(for $i in $l return $i.l_extendedprice * (1 - $i.l_discount) * (1 + $i.l_tax)),
+  "ave_qty": avg(for $i in $l return $i.l_quantity),  
+  "ave_price": avg(for $i in $l return $i.l_extendedprice),
+  "ave_disc": avg(for $i in $l return $i.l_discount),
+  "count_order": count($l)
+}   
diff --git a/asterix-app/src/test/resources/optimizerts/queries/q2.aql b/asterix-app/src/test/resources/optimizerts/queries/q2.aql
index 17b1f86..5fba695 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/q2.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/q2.aql
@@ -34,7 +34,7 @@
 
 create nodegroup group1 if not exists on nc1, nc2;
 
-create dataset Event(EventType) partitioned by key name on group1;
+create dataset Event(EventType) primary key name on group1;
 
 write output to nc1:"/tmp/q2.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/q3_shipping_priority.aql b/asterix-app/src/test/resources/optimizerts/queries/q3_shipping_priority.aql
index 89c6fd9..8e6bf6e6 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/q3_shipping_priority.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/q3_shipping_priority.aql
@@ -49,11 +49,11 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 create dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 
 write output to nc1:"/tmp/q3_shipping_priority.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/q5_local_supplier_volume.aql b/asterix-app/src/test/resources/optimizerts/queries/q5_local_supplier_volume.aql
index 6b5ccfa..670a392 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/q5_local_supplier_volume.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/q5_local_supplier_volume.aql
@@ -72,17 +72,17 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 create dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 create dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 create dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 create dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
   
 write output to nc1:"/tmp/q5_local_supplier.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_01.aql b/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_01.aql
index 207b5c9..e95be99 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_01.aql
@@ -20,8 +20,8 @@
   rec: rectangle
 }
 
-create dataset MyData1(MyRecord) partitioned by key id;
-create dataset MyData2(MyRecord) partitioned by key id;
+create dataset MyData1(MyRecord) primary key id;
+create dataset MyData2(MyRecord) primary key id;
 
 create index rtree_index on MyData1(point) type rtree;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_02.aql b/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_02.aql
index e70bedf..c0a8634 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_02.aql
@@ -20,8 +20,8 @@
   rec: rectangle
 }
 
-create dataset MyData1(MyRecord) partitioned by key id;
-create dataset MyData2(MyRecord) partitioned by key id;
+create dataset MyData1(MyRecord) primary key id;
+create dataset MyData2(MyRecord) primary key id;
 
 create index rtree_index on MyData2(point) type rtree;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_03.aql b/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_03.aql
index 85fc22b..6fb79f3 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_03.aql
@@ -20,7 +20,7 @@
   rec: rectangle
 }
 
-create dataset MyData(MyRecord) partitioned by key id;
+create dataset MyData(MyRecord) primary key id;
 
 create index rtree_index on MyData(point) type rtree;
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index-open.aql b/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index-open.aql
index 14416c8..b4a5796 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index-open.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index-open.aql
@@ -17,7 +17,7 @@
 create nodegroup group1  if not exists on nc1, nc2;
 
 create dataset MyData(MyRecord)
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset MyData 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index.aql b/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index.aql
index c8525c1..41a5a91 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index.aql
@@ -17,7 +17,7 @@
 create nodegroup group1  if not exists on nc1, nc2;
 
 create dataset MyData(MyRecord)
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset MyData 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/scan-delete-all.aql b/asterix-app/src/test/resources/optimizerts/queries/scan-delete-all.aql
index d30b7f8..7b8215f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/scan-delete-all.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/scan-delete-all.aql
@@ -30,9 +30,9 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset LineItems_q1(LineItemType)
-  partitioned by key l_orderkey on group1;
+  primary key l_orderkey on group1;
 
 create dataset LineID(LineIDType)
-  partitioned by key l_orderkey on group1;
+  primary key l_orderkey on group1;
 
 delete $l from dataset LineItems_q1;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/scan-delete-rtree-secondary-index.aql b/asterix-app/src/test/resources/optimizerts/queries/scan-delete-rtree-secondary-index.aql
index 4d5de58..a307a1d 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/scan-delete-rtree-secondary-index.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/scan-delete-rtree-secondary-index.aql
@@ -17,7 +17,7 @@
 create nodegroup group1  if not exists on nc1, nc2;
 
 create dataset MyData(MyRecord)
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset MyData 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/scan-delete.aql b/asterix-app/src/test/resources/optimizerts/queries/scan-delete.aql
index 9bf6a4c..57e1bd3 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/scan-delete.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/scan-delete.aql
@@ -31,9 +31,9 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset LineItems_q1(LineItemType)
-  partitioned by key l_orderkey on group1;
+  primary key l_orderkey on group1;
 
 create dataset LineID(LineIDType)
-  partitioned by key l_orderkey on group1;
+  primary key l_orderkey on group1;
 
 delete $l from dataset LineItems_q1 where $l.l_shipdate <= '1998-09-02';
diff --git a/asterix-app/src/test/resources/optimizerts/queries/scan-insert-secondary-index.aql b/asterix-app/src/test/resources/optimizerts/queries/scan-insert-secondary-index.aql
index afecc04..97bc8df 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/scan-insert-secondary-index.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/scan-insert-secondary-index.aql
@@ -30,10 +30,10 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset LineItems_q1(LineItemType)
-  partitioned by key l_orderkey on group1;
+  primary key l_orderkey on group1;
 
 create dataset LineID(LineIDType)
-  partitioned by key l_orderkey on group1;
+  primary key l_orderkey on group1;
   
 create index idx_LineID_partkey on LineID(l_partkey);
 create index idx_LineID_suppkey on LineID(l_suppkey);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/scan-insert.aql b/asterix-app/src/test/resources/optimizerts/queries/scan-insert.aql
index beb73a7..1c26a46 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/scan-insert.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/scan-insert.aql
@@ -30,10 +30,10 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset LineItems_q1(LineItemType)
-  partitioned by key l_orderkey on group1;
+  primary key l_orderkey on group1;
 
 create dataset LineID(LineIDType)
-  partitioned by key l_orderkey on group1;
+  primary key l_orderkey on group1;
 
 insert into dataset LineID (
 for $l in dataset('LineItems_q1')
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_01.aql
index 96d3b1a..4d63c15 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_01.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_edit-distance-let-to-edit-distance-check_01.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_02.aql
index e0ef1aa..762b41c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_02.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_edit-distance-let-to-edit-distance-check_02.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_03.aql
index 75a584e..e14f329 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_03.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_edit-distance-let-to-edit-distance-check_03.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_04.aql
index 2f42ab0..f539938 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_04.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_edit-distance-let-to-edit-distance-check_04.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_05.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_05.aql
index 73f8ddb..b126f67 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_05.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_05.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_edit-distance-let-to-edit-distance-check_05.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_06.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_06.aql
index 221bfb8..f32285e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_06.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_06.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_edit-distance-let-to-edit-distance-check_06.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_07.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_07.aql
index 949ed2c..2dff7e5 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_07.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_07.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_edit-distance-let-to-edit-distance-check_07.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_08.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_08.aql
index 3f899a2..9bc10e6 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_08.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_08.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_edit-distance-let-to-edit-distance-check_08.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_01.aql
index 0370fff..09e962c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_01.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_edit-distance-to-edit-distance-check_01.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_02.aql
index 77d78f8..35e28fc 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_02.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_edit-distance-to-edit-distance-check_02.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_03.aql
index c34c28a..e05f85a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_03.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_edit-distance-to-edit-distance-check_03.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_04.aql
index 082dc39..b890967 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_04.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_edit-distance-to-edit-distance-check_04.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_05.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_05.aql
index 6d892df..bc66061 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_05.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_05.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_edit-distance-to-edit-distance-check_05.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_06.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_06.aql
index 20d4879..018d6d1 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_06.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_06.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_edit-distance-to-edit-distance-check_06.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_07.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_07.aql
index 49871f9..cdaefa9 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_07.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_07.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_edit-distance-to-edit-distance-check_07.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_08.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_08.aql
index ed6d406..745044f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_08.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_08.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_edit-distance-to-edit-distance-check_08.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/fuzzyeq-to-edit-distance-check.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/fuzzyeq-to-edit-distance-check.aql
index 702b739..f6e9d52 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/fuzzyeq-to-edit-distance-check.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/fuzzyeq-to-edit-distance-check.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_fuzzyeq-to-edit-distance-check.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/fuzzyeq-to-jaccard-check.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/fuzzyeq-to-jaccard-check.aql
index eaacc3e..5f57a3e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/fuzzyeq-to-jaccard-check.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/fuzzyeq-to-jaccard-check.aql
@@ -16,7 +16,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_fuzzyeq-to-jaccard-check.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_01.aql
index 8647565..733c6d9 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_01.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_jaccard-let-to-jaccard-check_01.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_02.aql
index b48eb24..a360ef1 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_02.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_jaccard-let-to-jaccard-check_01.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_03.aql
index 74dcf9d..ad8abca 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_03.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_jaccard-let-to-jaccard-check_01.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_04.aql
index e17d458..fd236de 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_04.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_jaccard-let-to-jaccard-check_01.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_05.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_05.aql
index 266f369..b750a5d 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_05.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_05.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_jaccard-let-to-jaccard-check_01.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_06.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_06.aql
index 3beec4b..56b1471 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_06.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_06.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_jaccard-let-to-jaccard-check_01.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_07.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_07.aql
index 86401ac..6ae0444 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_07.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_07.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_jaccard-let-to-jaccard-check_01.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_08.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_08.aql
index 372a3e8..5d04402 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_08.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_08.aql
@@ -18,7 +18,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_jaccard-let-to-jaccard-check_01.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_01.aql
index 8e8a0f8..33eb133 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_01.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_jaccard-to-jaccard-check_01.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_02.aql
index bee4595..3bdf01f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_02.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_jaccard-to-jaccard-check_02.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_03.aql
index 591c8f0..3a22a16 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_03.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_jaccard-to-jaccard-check_02.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_04.aql
index 2d10e1d..bdce095 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_04.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_jaccard-to-jaccard-check_03.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_05.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_05.aql
index f091dcd..977f22a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_05.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_05.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_jaccard-to-jaccard-check_05.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_06.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_06.aql
index 7b98eaf..eb3f3ef 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_06.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_06.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_jaccard-to-jaccard-check_06.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_07.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_07.aql
index 71f087e..753ce3c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_07.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_07.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_jaccard-to-jaccard-check_07.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_08.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_08.aql
index 13d11ed..2d1bd93 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_08.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_08.aql
@@ -17,7 +17,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 write output to nc1:"rttest/similarity_jaccard-to-jaccard-check_08.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/sort-cust.aql b/asterix-app/src/test/resources/optimizerts/queries/sort-cust.aql
index ec4f6f1..9605b37 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/sort-cust.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/sort-cust.aql
@@ -27,7 +27,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Customers(CustomerType)
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 write output to nc1:"/tmp/custlimit.adm";
 
diff --git a/asterix-app/src/test/resources/optimizerts/queries/unnest_list_in_subplan.aql b/asterix-app/src/test/resources/optimizerts/queries/unnest_list_in_subplan.aql
index 5a943f1..3f34242 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/unnest_list_in_subplan.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/unnest_list_in_subplan.aql
@@ -17,8 +17,8 @@
 
 create nodegroup group1 if not exists on nc1, nc2;
 
-create dataset DBLP(DBLPType) partitioned by key id on group1;
-create dataset TOKENSRANKEDADM(TOKENSRANKEDADMType) partitioned by key rank on group1;
+create dataset DBLP(DBLPType) primary key id on group1;
+create dataset TOKENSRANKEDADM(TOKENSRANKEDADMType) primary key rank on group1;
 
 write output to nc1:'rttest/unnest_list_in_subplan.adm';
 
diff --git a/asterix-app/src/test/resources/optimizerts/results/q1_pricing_summary_report_nt.plan b/asterix-app/src/test/resources/optimizerts/results/q1_pricing_summary_report_nt.plan
new file mode 100644
index 0000000..676e6e2
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/q1_pricing_summary_report_nt.plan
@@ -0,0 +1,25 @@
+-- SINK_WRITE  |PARTITIONED|
+  -- STREAM_PROJECT  |PARTITIONED|
+    -- ASSIGN  |PARTITIONED|
+      -- SORT_MERGE_EXCHANGE [$$1(ASC), $$2(ASC) ]  |PARTITIONED|
+        -- STABLE_SORT [$$1(ASC), $$2(ASC)]  |PARTITIONED|
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            -- EXTERNAL_GROUP_BY[$$74, $$75]  |PARTITIONED|
+                    {
+                      -- AGGREGATE  |LOCAL|
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                    }
+              -- HASH_PARTITION_EXCHANGE [$$74, $$75]  |PARTITIONED|
+                -- EXTERNAL_GROUP_BY[$$48, $$49]  |PARTITIONED|
+                        {
+                          -- AGGREGATE  |LOCAL|
+                            -- NESTED_TUPLE_SOURCE  |LOCAL|
+                        }
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    -- STREAM_SELECT  |PARTITIONED|
+                      -- ASSIGN  |PARTITIONED|
+                        -- STREAM_PROJECT  |PARTITIONED|
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            -- DATASOURCE_SCAN  |PARTITIONED|
+                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/avg_empty_02.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/avg_empty_02.aql
index 3583ce0..2c4ba6a 100644
--- a/asterix-app/src/test/resources/runtimets/queries/aggregate/avg_empty_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/avg_empty_02.aql
@@ -13,7 +13,7 @@
   val: double
 }
 
-create dataset Test(TestType) partitioned by key id;
+create dataset Test(TestType) primary key id;
 
 write output to nc1:"rttest/aggregate_avg_empty_02.adm";
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/count_empty_02.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/count_empty_02.aql
index 0a6cc8e..9ab7168 100644
--- a/asterix-app/src/test/resources/runtimets/queries/aggregate/count_empty_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/count_empty_02.aql
@@ -13,7 +13,7 @@
   val: double
 }
 
-create dataset Test(TestType) partitioned by key id;
+create dataset Test(TestType) primary key id;
 
 write output to nc1:"rttest/aggregate_count_empty_02.adm";
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/max_empty_02.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/max_empty_02.aql
index 79ae1d8..8b954e1 100644
--- a/asterix-app/src/test/resources/runtimets/queries/aggregate/max_empty_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/max_empty_02.aql
@@ -13,7 +13,7 @@
   val: double
 }
 
-create dataset Test(TestType) partitioned by key id;
+create dataset Test(TestType) primary key id;
 
 write output to nc1:"rttest/aggregate_max_empty_02.adm";
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/min_empty_02.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/min_empty_02.aql
index 99d49f4..6aa122c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/aggregate/min_empty_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/min_empty_02.aql
@@ -13,7 +13,7 @@
   val: double
 }
 
-create dataset Test(TestType) partitioned by key id;
+create dataset Test(TestType) primary key id;
 
 write output to nc1:"rttest/aggregate_min_empty_02.adm";
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_empty_02.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_empty_02.aql
index a94457a..c3fd645 100644
--- a/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_empty_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_empty_02.aql
@@ -13,7 +13,7 @@
   val: double
 }
 
-create dataset Test(TestType) partitioned by key id;
+create dataset Test(TestType) primary key id;
 
 write output to nc1:"rttest/aggregate_sum_empty_02.adm";
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_null-with-pred.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_null-with-pred.aql
index 10e952c..8c695c5 100644
--- a/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_null-with-pred.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_null-with-pred.aql
@@ -16,7 +16,7 @@
 sal:int32?
 }
 
-create dataset tdst(TestType) partitioned by key id;
+create dataset tdst(TestType) primary key id;
 
 insert into dataset tdst({"id":123,"sal":1000});
 insert into dataset tdst({"id":113,"sal":2000});
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_numeric_null.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_numeric_null.aql
index 9197e33..4c568b1 100644
--- a/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_numeric_null.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_numeric_null.aql
@@ -15,7 +15,7 @@
 sal:int32?
 }
 
-create dataset tdst(TestType) partitioned by key id;
+create dataset tdst(TestType) primary key id;
 
 // In AQL
 // sum(numeric + null) => null
diff --git a/asterix-app/src/test/resources/runtimets/queries/comparison/date_order.aql b/asterix-app/src/test/resources/runtimets/queries/comparison/date_order.aql
new file mode 100644
index 0000000..d38d386
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/comparison/date_order.aql
@@ -0,0 +1,16 @@
+drop dataverse test if exists;
+
+create dataverse test;
+
+write output to nc1:"rttest/comparison_date_order.adm";
+
+let $d1 := date("2049-04-23")
+let $d2 := date("2012-02-29")
+let $d3 := date("2021-03-01")
+let $d4 := date("1362-02-28")
+let $d5 := date("1600-02-29")
+let $d6 := date("-0500-03-21")
+
+for $d in [$d1, $d2, $d3, $d4, $d5, $d6]
+order by $d
+return $d
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/comparison/datetime_range.aql b/asterix-app/src/test/resources/runtimets/queries/comparison/datetime_range.aql
index 4268231..a1cbf52 100644
--- a/asterix-app/src/test/resources/runtimets/queries/comparison/datetime_range.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/comparison/datetime_range.aql
@@ -11,7 +11,7 @@
 }
 
 create dataset TwitterData(Tweet)
-  partitioned by key id;
+  primary key id;
 
 load dataset TwitterData 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/comparison/time_order.aql b/asterix-app/src/test/resources/runtimets/queries/comparison/time_order.aql
new file mode 100644
index 0000000..ba0d211
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/comparison/time_order.aql
@@ -0,0 +1,16 @@
+drop dataverse test if exists;
+
+create dataverse test;
+
+write output to nc1:"rttest/comparison_time_order.adm";
+
+let $t1 := time("13:00:00.382-10:00")
+let $t2 := time("23:59:59.999Z")
+let $t3 := time("22:00:00+03:00")
+let $t4 := time("00:00:00.00Z")
+let $t5 := time("00:00:00.00-02:00")
+let $t6 := time("00:00:00.47+04:00")
+
+for $t in [$t1, $t2, $t3, $t4, $t5, $t6]
+order by $t
+return $t
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/constructor/int_01.aql b/asterix-app/src/test/resources/runtimets/queries/constructor/int_01.aql
index d35c84f..58ae18d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/constructor/int_01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/constructor/int_01.aql
@@ -12,5 +12,5 @@
 let $c6 := int16("-160i16")
 let $c7 := int32("-320")
 let $c8 := int64("-640i64")
-return {"int8": $c1,"int16": $c2,"int32": $c3, "int64": $c4, "int8": $c5,"int16": $c6,"int32": $c7, "int64": $c8}
-
+let $c9 := int64("-9223372036854775808")
+return {"int8": $c1,"int16": $c2,"int32": $c3, "int64": $c4, "int8_2": $c5,"int16_2": $c6,"int32_2": $c7, "int64_2": $c8, "int64_min" : $c9}
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/constructor/interval.aql b/asterix-app/src/test/resources/runtimets/queries/constructor/interval.aql
new file mode 100644
index 0000000..71e90fa
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/constructor/interval.aql
@@ -0,0 +1,15 @@
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+write output to nc1:"rttest/constructor_interval.adm";
+
+let $itv1 := interval-from-date("2010-10-30", "2012-10-21")
+let $itv2 := interval-from-time("03:04:05.678", "232425267")
+let $itv3 := interval-from-datetime("-1987-11-19T02:43:57.938", "1999-11-12T12:49:35.948")
+let $itv4 := interval-start-from-date("0001-12-27", "P3Y394DT48H398.483S")
+let $itv5 := interval-start-from-time("20:03:20.948", "PT48M389.938S")
+let $itv6 := interval-start-from-datetime("-2043-11-19T15:32:39.293", "P439Y3M20DT20H39M58.949S")
+
+return {"interval1": $itv1, "interval2": $itv2, "interval3": $itv3, "interval4": $itv4, "interval5": $itv5, "interval6": $itv6}
+
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv01.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv01.aql
index e95d102..eaec65a 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv01.aql
@@ -30,10 +30,10 @@
 dept : string
 }
 
-create dataset student.ugdstd(stdType) partitioned by key id;
-create dataset student.gdstd(stdType) partitioned by key id;
-create dataset teacher.prof(tchrType) partitioned by key id;
-create dataset teacher.pstdoc(tchrType) partitioned by key id;
+create dataset student.ugdstd(stdType) primary key id;
+create dataset student.gdstd(stdType) primary key id;
+create dataset teacher.prof(tchrType) primary key id;
+create dataset teacher.pstdoc(tchrType) primary key id;
 
 insert into dataset student.ugdstd({"id":457,"name":"John Doe","age":22,"sex":"M","dept":"Dance"});
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv02.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv02.aql
index 1257cac..4ab7c29 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv02.aql
@@ -30,10 +30,10 @@
 dept : string
 }
 
-create dataset student.ugdstd(stdType) partitioned by key id;
-create dataset student.gdstd(stdType) partitioned by key id;
-create dataset teacher.prof(tchrType) partitioned by key id;
-create dataset teacher.pstdoc(tchrType) partitioned by key id;
+create dataset student.ugdstd(stdType) primary key id;
+create dataset student.gdstd(stdType) primary key id;
+create dataset teacher.prof(tchrType) primary key id;
+create dataset teacher.pstdoc(tchrType) primary key id;
 
 insert into dataset student.ugdstd({"id":457,"name":"John Doe","age":22,"sex":"M","dept":"Dance"});
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv03.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv03.aql
index 0c80540..a68d428 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv03.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv03.aql
@@ -32,10 +32,10 @@
 dept : string
 }
 
-create dataset student.ugdstd(stdType) partitioned by key id;
-create dataset student.gdstd(stdType) partitioned by key id;
-create dataset teacher.prof(tchrType) partitioned by key id;
-create dataset teacher.pstdoc(tchrType) partitioned by key id;
+create dataset student.ugdstd(stdType) primary key id;
+create dataset student.gdstd(stdType) primary key id;
+create dataset teacher.prof(tchrType) primary key id;
+create dataset teacher.pstdoc(tchrType) primary key id;
 
 drop dataset student.ugdstd;
 drop dataset student.gdstd;
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv04.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv04.aql
index 20be103..55a3fa0 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv04.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv04.aql
@@ -33,20 +33,20 @@
 dept : string
 }
 
-create dataset student.ugdstd(stdType) partitioned by key id;
-create dataset student.gdstd(stdType) partitioned by key id;
-create dataset teacher.prof(tchrType) partitioned by key id;
-create dataset teacher.pstdoc(tchrType) partitioned by key id;
+create dataset student.ugdstd(stdType) primary key id;
+create dataset student.gdstd(stdType) primary key id;
+create dataset teacher.prof(tchrType) primary key id;
+create dataset teacher.pstdoc(tchrType) primary key id;
 
 drop dataset student.ugdstd;
 drop dataset student.gdstd;
 drop dataset teacher.prof;
 drop dataset teacher.pstdoc;
 
-create dataset student.ugdstd(stdType) partitioned by key id;
-create dataset student.gdstd(stdType) partitioned by key id;
-create dataset teacher.prof(tchrType) partitioned by key id;
-create dataset teacher.pstdoc(tchrType) partitioned by key id;
+create dataset student.ugdstd(stdType) primary key id;
+create dataset student.gdstd(stdType) primary key id;
+create dataset teacher.prof(tchrType) primary key id;
+create dataset teacher.pstdoc(tchrType) primary key id;
 
 for $l in dataset('Metadata.Dataset')
 where $l.DataverseName='student' or $l.DataverseName='teacher'
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv07.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv07.aql
index 4bba053..25e5d67 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv07.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv07.aql
@@ -18,7 +18,7 @@
 dept:string
 }
 
-create dataset test.employee(Emp) partitioned by key id;
+create dataset test.employee(Emp) primary key id;
 
 load dataset test.employee
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv08.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv08.aql
index 10985e3..8b4a32d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv08.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv08.aql
@@ -22,8 +22,8 @@
 id : int32
 }
 
-create dataset test.t1(testtype) partitioned by key id;
-create dataset fest.t1(testtype) partitioned by key id;
+create dataset test.t1(testtype) primary key id;
+create dataset fest.t1(testtype) primary key id;
 
 insert into dataset test.t1({"id":24});
 insert into dataset test.t1({"id":23});
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv17.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv17.aql
index 26556e0..8e42043 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv17.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv17.aql
@@ -20,8 +20,8 @@
 id : int32
 }
 
-create dataset test.t1(testtype) partitioned by key id;
-create dataset fest.t1(testtype) partitioned by key id;
+create dataset test.t1(testtype) primary key id;
+create dataset fest.t1(testtype) primary key id;
 
 insert into dataset test.t1({"id":24});
 insert into dataset test.t1({"id":23});
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv18.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv18.aql
index 0d3bd53..69dd67d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv18.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv18.aql
@@ -21,8 +21,8 @@
 id : int32
 }
 
-create dataset test.t1(testtype) partitioned by key id;
-create dataset fest.t1(testtype) partitioned by key id;
+create dataset test.t1(testtype) primary key id;
+create dataset fest.t1(testtype) primary key id;
 
 insert into dataset test.t1({"id":24});
 insert into dataset test.t1({"id":23});
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv19.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv19.aql
index 335f11c..6a0cf97 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv19.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv19.aql
@@ -33,17 +33,17 @@
   text: string
 }
 
-create dataset test1.t1(testtype) partitioned by key id;
+create dataset test1.t1(testtype) primary key id;
 
-create dataset test2.t2(testtype) partitioned by key id;
+create dataset test2.t2(testtype) primary key id;
 
-create dataset test2.t3(testtype) partitioned by key id;
+create dataset test2.t3(testtype) primary key id;
 
-create dataset test1.t2(testtype) partitioned by key id;
+create dataset test1.t2(testtype) primary key id;
 
-create dataset test1.t3(testtype) partitioned by key id;
+create dataset test1.t3(testtype) primary key id;
 
-create dataset test2.t4(testtype) partitioned by key id;
+create dataset test2.t4(testtype) primary key id;
 
 create external dataset test1.TwitterData(Tweet)
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/drop_dataset.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/drop_dataset.aql
index 7b14957..860dd74 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/drop_dataset.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/drop_dataset.aql
@@ -20,7 +20,7 @@
 };
 
 create dataset test.Customers(CustomerType)
-partitioned by key cid;
+primary key cid;
 
 drop dataset test.Customers;
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/insert_across_dataverses.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/insert_across_dataverses.aql
index 7eebbaf..2395a48 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/insert_across_dataverses.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/insert_across_dataverses.aql
@@ -42,10 +42,10 @@
 };
 
 create dataset test1.Customers(CustomerType)
-partitioned by key cid;
+primary key cid;
 
 create dataset test2.Customers(CustomerType)
-partitioned by key cid;
+primary key cid;
 
 load dataset test1.Customers
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/insert_from_source_dataset.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/insert_from_source_dataset.aql
index 89180ef..3c96539 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/insert_from_source_dataset.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/insert_from_source_dataset.aql
@@ -14,7 +14,7 @@
 
 write output to nc1:"rttest/cross-dataverse_insert_from_source_dataset.adm";
 
-create dataset test.t1(testtype) partitioned by key id;
+create dataset test.t1(testtype) primary key id;
 
 insert into dataset test.t1({"id":456,"name":"Roger"});
 insert into dataset test.t1({"id":351,"name":"Bob"});
@@ -22,7 +22,7 @@
 insert into dataset test.t1({"id":926,"name":"Richard"});
 insert into dataset test.t1({"id":482,"name":"Kevin"});
 
-create dataset test.t2(testtype) partitioned by key id;
+create dataset test.t2(testtype) primary key id;
 
 insert into dataset test.t2({"id":438,"name":"Ravi"});
 insert into dataset test.t2({"id":321,"name":"Bobby"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/join_across_dataverses.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/join_across_dataverses.aql
index d09755a..197baaa 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/join_across_dataverses.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/join_across_dataverses.aql
@@ -25,7 +25,7 @@
 };
 
 create dataset test1.Customers(CustomerType)
-partitioned by key cid;
+primary key cid;
 
 
 create type test2.OrderType as open {
@@ -39,7 +39,7 @@
 }
 
 create dataset test2.Orders(OrderType)
-partitioned by key oid;
+primary key oid;
 
 
 load dataset test1.Customers
@@ -57,4 +57,4 @@
 for $o in dataset('test2.Orders')
 where $c.cid = $o.cid
 order by $c.name, $o.total
-return {"cust_name":$c.name, "cust_age": $c.age, "order_total":$o.total, "orderList":[$o.oid, $o.cid], "orderList":{{$o.oid, $o.cid}}}
+return {"cust_name":$c.name, "cust_age": $c.age, "order_total":$o.total, "orderList":[$o.oid, $o.cid]}
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/co.aql b/asterix-app/src/test/resources/runtimets/queries/custord/co.aql
index 879b2b8..2b9cc6b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/co.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/co.aql
@@ -37,11 +37,11 @@
 }
 
 create dataset Customers3(CustomerType) 
-  partitioned by key cid;
+  primary key cid;
 create dataset Orders3(OrderType)
-  partitioned by key oid;
+  primary key oid;
 create dataset CustomerOrders3(CustomerOrdersType)
-  partitioned by key cid;
+  primary key cid;
 
 write output to nc1:"rttest/custord_co.adm";
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_01.aql b/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_01.aql
index decc8bb..bcc4079 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_01.aql
@@ -37,11 +37,11 @@
 }
 
 create dataset Customers1(CustomerType) 
-  partitioned by key cid;
+  primary key cid;
 create dataset Orders1(OrderType)
-  partitioned by key oid;
+  primary key oid;
 create dataset CustomerOrders1(CustomerOrdersType)
-  partitioned by key cid;
+  primary key cid;
 
 load dataset Customers1 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_02.aql b/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_02.aql
index 2ac6910..481adf4 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_02.aql
@@ -37,11 +37,11 @@
 }
 
 create dataset Customers2(CustomerType) 
-  partitioned by key cid;
+  primary key cid;
 create dataset Orders2(OrderType)
-  partitioned by key oid;
+  primary key oid;
 create dataset CustomerOrders2(CustomerOrdersType)
-  partitioned by key cid;
+  primary key cid;
 
 load dataset Customers2 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_03.aql b/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_03.aql
index 324dbb0..92a049d5 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_03.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_03.aql
@@ -37,11 +37,11 @@
 }
 
 create dataset Customers3(CustomerType) 
-  partitioned by key cid;
+  primary key cid;
 create dataset Orders3(OrderType)
-  partitioned by key oid;
+  primary key oid;
 create dataset CustomerOrders3(CustomerOrdersType)
-  partitioned by key cid;
+  primary key cid;
 
 load dataset Customers3 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/freq-clerk.aql b/asterix-app/src/test/resources/runtimets/queries/custord/freq-clerk.aql
index cd4ef0e..9bc318a 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/freq-clerk.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/freq-clerk.aql
@@ -28,7 +28,7 @@
 }
 
 create dataset CustomerOrders(CustomerOrderType) 
-  partitioned by key cid;
+  primary key cid;
 
 
 load dataset CustomerOrders
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/join_q_01.aql b/asterix-app/src/test/resources/runtimets/queries/custord/join_q_01.aql
index ddde56e..f5d2080 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/join_q_01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/join_q_01.aql
@@ -47,4 +47,4 @@
 for $o in dataset('Orders')
 where $c.cid = $o.cid 
 order by $c.name, $o.total
-return {"cust_name":$c.name, "cust_age": $c.age, "order_total":$o.total, "orderList":[$o.oid, $o.cid], "orderList":{{$o.oid, $o.cid}}} 
+return {"cust_name":$c.name, "cust_age": $c.age, "order_total":$o.total, "orderList":[$o.oid, $o.cid]} 
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/load-test.aql b/asterix-app/src/test/resources/runtimets/queries/custord/load-test.aql
index 5f73e4b..6cafbe0 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/load-test.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/load-test.aql
@@ -21,9 +21,9 @@
 }
 
 create dataset c1(CustomerType) 
-  partitioned by key cid;
+  primary key cid;
 create dataset c2(CustomerType) 
-  partitioned by key cid;  
+  primary key cid;  
   
 load dataset c1 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/order_q_03.aql b/asterix-app/src/test/resources/runtimets/queries/custord/order_q_03.aql
index b930a10..52bc6d2 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/order_q_03.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/order_q_03.aql
@@ -26,4 +26,4 @@
 let $c2 := {{ $o.orderstatus, $o.clerk}}
 let $c3 := [$o.heList, $o.openlist, $o.loc, $o.line, $o.poly, $o.lastorder]
 let $c4 := [$o.heList, $o.openlist, $o.loc, $o.line, $o.poly, $o.lastorder]
-return { "orderid": $o.oid, "ordertot":$o.total, "list": $c1, "item1": $c1[0], "item1": $c1[?], "item2": $c1[1], "item3": $c1[2]}
+return { "orderid": $o.oid, "ordertot":$o.total, "list": $c1, "item1": $c1[0], "item2": $c1[1], "item3": $c1[2]}
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/order_q_04.aql b/asterix-app/src/test/resources/runtimets/queries/custord/order_q_04.aql
index b2d55d9..e2fd5b6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/order_q_04.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/order_q_04.aql
@@ -26,4 +26,4 @@
 let $c2 := {{ $o.orderstatus, $o.clerk}}
 let $c3 := [$o.heList, $o.openlist, $o.loc, $o.line, $o.poly, $o.lastorder]
 let $c4 := [$o.heList, $o.openlist, $o.loc, $o.line, $o.poly, $o.lastorder]
-return { "orderid": $o.oid, "ordertot":$o.total, "list": $c3, "item1": $c3[0], "item1": $c3[?], "item2": $c3[1], "item5": $c3[5], "item10": $c3[10]}
+return { "orderid": $o.oid, "ordertot":$o.total, "list": $c3, "item1": $c3[0], "item2": $c3[1], "item5": $c3[5], "item10": $c3[10]}
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/order_q_05.aql b/asterix-app/src/test/resources/runtimets/queries/custord/order_q_05.aql
index 5693312..481c1fd 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/order_q_05.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/order_q_05.aql
@@ -24,4 +24,4 @@
 for $o in dataset('Orders')
 let $c1 := []
 let $c2 := {{}}
-return { "orderid": $o.oid, "ordertot":$o.total, "emptyorderedlist": $c1, "emptyunorderedlist": $c2, "olist_item1": $c1[0], "olist_item1": $c1[?], "olist_item5": $c1[4], "ulist_item1": $c2[?]}
+return { "orderid": $o.oid, "ordertot":$o.total, "emptyorderedlist": $c1, "emptyunorderedlist": $c2, "olist_item1": $c1[0], "olist_item5": $c1[4], "ulist_item1": $c2[?]}
diff --git a/asterix-app/src/test/resources/runtimets/queries/dapd/q3.aql b/asterix-app/src/test/resources/runtimets/queries/dapd/q3.aql
index 23e4557..63b9119 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dapd/q3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dapd/q3.aql
@@ -24,7 +24,7 @@
 }
 
 create dataset User(UserType)
- partitioned by key name;
+ primary key name;
 
 load dataset User 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/create-drop-cltype.aql b/asterix-app/src/test/resources/runtimets/queries/dml/create-drop-cltype.aql
new file mode 100644
index 0000000..f6c6e91
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/create-drop-cltype.aql
@@ -0,0 +1,40 @@
+/*
+ * Description  : Create, drop, and recreate the same closed type; the type has optional fields.
+ *              : Verify correctness by querying the metadata.
+ * Date         : 11th Feb 2013
+ * Expected Res : Success
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type TestType as closed {
+id : int32,
+salary : double ?,
+name : string,
+durtn : duration ?,
+inter : interval,
+dt : date ?,
+tm : time,
+pt : point ?
+}
+
+drop type TestType;
+
+create type TestType as closed {
+id : int32,
+salary : double ?,
+name : string,
+durtn : duration ?,
+inter : interval,
+dt : date ?,
+tm : time,
+pt : point ?
+}
+
+write output to nc1:"rttest/dml_create-drop-cltype.adm";
+
+for $l in dataset('Metadata.Datatype')
+where $l.DatatypeName = 'TestType'
+return $l
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/create-drop-opntype.aql b/asterix-app/src/test/resources/runtimets/queries/dml/create-drop-opntype.aql
new file mode 100644
index 0000000..0ef6e93
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/create-drop-opntype.aql
@@ -0,0 +1,40 @@
+/*
+ * Description  : Create, drop, and recreate the same open type; the type has optional fields.
+ *              : Verify correctness by querying the metadata.
+ * Date         : 11th Feb 2013
+ * Expected Res : Success
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type TestType as open {
+id : int32,
+salary : double ?,
+name : string,
+durtn : duration ?,
+inter : interval,
+dt : date ?,
+tm : time,
+pt : point ?
+}
+
+drop type TestType;
+
+create type TestType as open {
+id : int32,
+salary : double ?,
+name : string,
+durtn : duration ?,
+inter : interval,
+dt : date ?,
+tm : time,
+pt : point ?
+}
+
+write output to nc1:"rttest/dml_create-drop-opntype.adm"; 
+
+for $l in dataset('Metadata.Datatype')
+where $l.DatatypeName = 'TestType'
+return $l
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/delete-from-loaded-dataset-with-index.aql b/asterix-app/src/test/resources/runtimets/queries/dml/delete-from-loaded-dataset-with-index.aql
index ab6b6c9..e642073 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/delete-from-loaded-dataset-with-index.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/delete-from-loaded-dataset-with-index.aql
@@ -22,7 +22,7 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/delete-from-loaded-dataset.aql b/asterix-app/src/test/resources/runtimets/queries/dml/delete-from-loaded-dataset.aql
index 1a445bf..ff66d65 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/delete-from-loaded-dataset.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/delete-from-loaded-dataset.aql
@@ -22,7 +22,7 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/drop-empty-secondary-indexes.aql b/asterix-app/src/test/resources/runtimets/queries/dml/drop-empty-secondary-indexes.aql
new file mode 100644
index 0000000..b25f028
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/drop-empty-secondary-indexes.aql
@@ -0,0 +1,37 @@
+/*
+ * Description     : Drop empty secondary indexes.
+ * Expected Result : Success
+ * Date            : 8th Feb 2013
+ *
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type TestType as open {
+id : int32,
+name : string,
+locn : point,
+zip : string
+}
+
+write output to nc1:"rttest/dml_drop-empty-secondary-indexes.adm";
+
+create dataset t1(TestType) primary key id;
+
+create index rtree_index_point on t1(locn) type rtree;
+
+create index keyWD_indx on t1(name) type keyword;
+
+create index secndIndx on t1(zip);
+
+drop index t1.rtree_index_point;
+
+drop index t1.keyWD_indx;
+
+drop index t1.secndIndx;
+
+for $l in dataset('Metadata.Index')
+where $l.IsPrimary=false
+return $l;
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/drop-index.aql b/asterix-app/src/test/resources/runtimets/queries/dml/drop-index.aql
index 0c87734..8b99487 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/drop-index.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/drop-index.aql
@@ -28,7 +28,7 @@
 string4:  string
 }
 
-create dataset t1(Schema) partitioned by key unique2;
+create dataset t1(Schema) primary key unique2;
 
 // Load data
 load dataset t1
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/empty-load-with-index.aql b/asterix-app/src/test/resources/runtimets/queries/dml/empty-load-with-index.aql
index 4f22e84..45ec5e6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/empty-load-with-index.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/empty-load-with-index.aql
@@ -29,7 +29,7 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 create index part_index on LineItem(l_partkey);
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/empty-load.aql b/asterix-app/src/test/resources/runtimets/queries/dml/empty-load.aql
index f9db086..63739d6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/empty-load.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/empty-load.aql
@@ -29,7 +29,7 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 create index part_index on LineItem(l_partkey);
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index.aql
index 00b60cb..586226c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index.aql
@@ -16,7 +16,7 @@
 }
 
 create dataset LineID(LineIDType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 create index idx_LineID_partkey on LineID(l_linenumber);
 create index idx_LineID_suppkey on LineID(l_suppkey);
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset.aql
index 253f5e7..58548b6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset.aql
@@ -16,7 +16,7 @@
 }
 
 create dataset LineID(LineIDType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 insert into dataset LineID (
 let $x:=1
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset-with-index_01.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset-with-index_01.aql
index 7a3cce2..761066e 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset-with-index_01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset-with-index_01.aql
@@ -9,7 +9,7 @@
 }
 
 create dataset LineID(LineIDType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 load dataset LineID 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset-with-index_02.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset-with-index_02.aql
index 04a5043..475e6ab 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset-with-index_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset-with-index_02.aql
@@ -28,10 +28,10 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 create dataset LineID(LineIDType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset_01.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset_01.aql
index 1f211e8..7ece2ab 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset_01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset_01.aql
@@ -9,7 +9,7 @@
 }
 
 create dataset LineID(LineIDType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 load dataset LineID 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset_02.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset_02.aql
index ef38109..f05844b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset_02.aql
@@ -21,9 +21,9 @@
 string4:  string
 }
 
-create dataset onektup(Schema) partitioned by key unique2;
+create dataset onektup(Schema) primary key unique2;
 
-create dataset tenktup1(Schema) partitioned by key unique2;
+create dataset tenktup1(Schema) primary key unique2;
 
 load dataset onektup 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
@@ -33,7 +33,7 @@
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
 (("path"="nc1://data/wisc/tenktup.adm"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
 
-create dataset tmp(Schema) partitioned by key unique2;
+create dataset tmp(Schema) primary key unique2;
 
 load dataset tmp 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-src-dst-01.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-src-dst-01.aql
index c3dc7ba..7c055f7 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert-src-dst-01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-src-dst-01.aql
@@ -20,8 +20,8 @@
   id: string
 }
 
-create dataset testds01(testtype01) partitioned by key id;
-create dataset testds02(testtype02) partitioned by key id;
+create dataset testds01(testtype01) primary key id;
+create dataset testds02(testtype02) primary key id;
 
 insert into dataset testds01 ({ "id": "001" });
 insert into dataset testds01 ({ "id": "002", "name": "John Doe" });
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert.aql
index 2b4622f..a0d4d5b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert.aql
@@ -28,10 +28,10 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 create dataset LineID(LineIDType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert_less_nc.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert_less_nc.aql
index 7e133ae..c646c4c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert_less_nc.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert_less_nc.aql
@@ -28,10 +28,10 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 create dataset LineID(LineIDType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/load-from-hdfs.aql b/asterix-app/src/test/resources/runtimets/queries/dml/load-from-hdfs.aql
index 0411145..908f515 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/load-from-hdfs.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/load-from-hdfs.aql
@@ -22,7 +22,7 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.HDFSAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/load-with-index.aql b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-index.aql
index efab267..61b455c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/load-with-index.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-index.aql
@@ -22,7 +22,7 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 create index idx_partkey on LineItem(l_partkey);
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-c2o-recursive.aql b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-c2o-recursive.aql
index 6c8264f..b2ec24f 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-c2o-recursive.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-c2o-recursive.aql
@@ -30,9 +30,9 @@
   id: string
 }
 
-create dataset testds(testtype) partitioned by key id;
+create dataset testds(testtype) primary key id;
 
-create dataset testds2(testtype2) partitioned by key id;
+create dataset testds2(testtype2) primary key id;
 
 insert into dataset testds (
 { "id": "001", "name": "Person One", "address": {"street": "3019 DBH",  "city": "Irvine", "zip": 92697}, "department": {{ {"name":"CS", "id":299, "review":5}, {"name":"EE", "id":399} }} }
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-c2o.aql b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-c2o.aql
index a63e2df..29d7d61 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-c2o.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-c2o.aql
@@ -23,9 +23,9 @@
   id: string
 }
 
-create dataset testds(testtype) partitioned by key id;
+create dataset testds(testtype) primary key id;
 
-create dataset testds2(testtype2) partitioned by key id;
+create dataset testds2(testtype2) primary key id;
  
 insert into dataset testds (
 { "hobby": {{"music", "coding"}}, "id": "001", "name": "Person Three"}
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-closed-optional.aql b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-closed-optional.aql
index 94991be..b510ba4 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-closed-optional.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-closed-optional.aql
@@ -13,7 +13,7 @@
   id: string
 }
 
-create dataset testds(testtype) partitioned by key id;
+create dataset testds(testtype) primary key id;
 
 insert into dataset testds (
 { "id": "001", "name": "Person One"}
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-insert.aql b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-insert.aql
index 12248dd..9301ac5 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-insert.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-insert.aql
@@ -13,7 +13,7 @@
   name: string
 }
 
-create dataset testds(testtype) partitioned by key id;
+create dataset testds(testtype) primary key id;
  
  insert into dataset testds (
  { "id": "001", "name": "Person Three", "hobbies": {{"scuba", "music"}}}
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-insert2.aql b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-insert2.aql
index 1903bf2..e673c57 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-insert2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-insert2.aql
@@ -13,7 +13,7 @@
 	id:int32
 }
 
-create dataset testds(TestType) partitioned by key id;
+create dataset testds(TestType) primary key id;
 
 insert into dataset testds( for $i in range(1,10) return { "id":$i,"name":"John Doe" });
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-noexpand.aql b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-noexpand.aql
index c6ff1cd..bc408be 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-noexpand.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-noexpand.aql
@@ -14,7 +14,7 @@
   id: string
 }
 
-create dataset testds(testtype) partitioned by key id;
+create dataset testds(testtype) primary key id;
 
 insert into dataset testds (
 { "id": "001", "name": "Person One"}
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2c-recursive.aql b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2c-recursive.aql
index 2070e20..92d2bce 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2c-recursive.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2c-recursive.aql
@@ -31,9 +31,9 @@
   department: {{Dept}}?
 }
 
-create dataset testds(testtype) partitioned by key id;
+create dataset testds(testtype) primary key id;
 
-create dataset testds2(testtype2) partitioned by key id;
+create dataset testds2(testtype2) primary key id;
 
 insert into dataset testds (
 { "id": "001", "name": "Person One", "address": {"street": "3019 DBH",  "city": "Irvine", "zip": 92697}, "department": {{ {"name":"CS", "id":299}, {"name":"EE", "id":399} }} }
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2c.aql b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2c.aql
index 015aa7b..ae69e17 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2c.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2c.aql
@@ -21,9 +21,9 @@
   name: string
 }
 
-create dataset testds(testtype) partitioned by key id;
+create dataset testds(testtype) primary key id;
 
-create dataset testds2(testtype2) partitioned by key id; 
+create dataset testds2(testtype2) primary key id; 
  
 insert into dataset testds (
 { "id": "001",  "hobby": {{"music"}}, "name": "Person Three"}
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2o.aql b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2o.aql
index 545790b..c836a69 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2o.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2o.aql
@@ -21,9 +21,9 @@
   hobby: string
 }
 
-create dataset testds(testtype) partitioned by key id;
+create dataset testds(testtype) primary key id;
 
-create dataset testds2(testtype2) partitioned by key id;
+create dataset testds2(testtype2) primary key id;
  
 insert into dataset testds (
 { "name": "Person One",  "id": "001", "hobby": "music"}
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/query-issue205.aql b/asterix-app/src/test/resources/runtimets/queries/dml/query-issue205.aql
index e0a0695..3a2ad40 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/query-issue205.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/query-issue205.aql
@@ -21,7 +21,7 @@
 }
 
 create dataset Employees(EmployeeType)
-  partitioned by key id;
+  primary key id;
 
 insert into dataset Employees({"id":"1234", "stat":{ "age":50, "salary":120000}, "deptCode":32 });
 insert into dataset Employees({"id":"5678", "stat":{ "age":40, "salary":100000}, "deptCode":16 });
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-btree-secondary-index-nullable.aql b/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-btree-secondary-index-nullable.aql
index a35c8e5..36df10a 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-btree-secondary-index-nullable.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-btree-secondary-index-nullable.aql
@@ -24,7 +24,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index-nullable.aql b/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index-nullable.aql
index fc30529..c44c3e8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index-nullable.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index-nullable.aql
@@ -21,7 +21,7 @@
 }
 
 create dataset MyData(MyRecord)
-  partitioned by key id;
+  primary key id;
 
 load dataset MyData 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index.aql b/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index.aql
index 4e2ca6c..b6bc17b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index.aql
@@ -15,7 +15,7 @@
 }
 
 create dataset MyData(MyRecord)
-  partitioned by key id;
+  primary key id;
 
 load dataset MyData 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-btree-secondary-index-nullable.aql b/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-btree-secondary-index-nullable.aql
index d5d22d3..dde8892 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-btree-secondary-index-nullable.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-btree-secondary-index-nullable.aql
@@ -24,8 +24,8 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
-create dataset CustomersMini(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
+create dataset CustomersMini(CustomerType) primary key cid;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index-nullable.aql b/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index-nullable.aql
index 161439a..a3d38eb 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index-nullable.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index-nullable.aql
@@ -26,14 +26,14 @@
 }
 
 create dataset MyData(MyRecord)
-  partitioned by key id;
+  primary key id;
 
 load dataset MyData 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
 (("path"="nc1://data/spatial/spatialDataNulls.json"),("format"="adm")) pre-sorted;
 
 create dataset MyMiniData(MyMiniRecord)
-  partitioned by key id;
+  primary key id;
 
 create index rtree_index_point on MyMiniData(point) type rtree;
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index.aql b/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index.aql
index f1bc29d..0ed6488 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index.aql
@@ -20,14 +20,14 @@
 }
 
 create dataset MyData(MyRecord)
-  partitioned by key id;
+  primary key id;
 
 load dataset MyData 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
 (("path"="nc1://data/spatial/spatialData.json"),("format"="adm")) pre-sorted;
 
 create dataset MyMiniData(MyMiniRecord)
-  partitioned by key id;
+  primary key id;
 
 load dataset MyMiniData 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree.aql
index 465d94f..f883454 100644
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree.aql
@@ -12,7 +12,7 @@
 }
 
 create dataset MyData(MyRecord)
-  partitioned by key id;
+  primary key id;
 
 load dataset MyData 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete.aql
index cd4a922..7bf558b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/failure/delete.aql
@@ -23,7 +23,7 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree.aql
index 2ec32f4..9530df5 100644
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree.aql
@@ -17,7 +17,7 @@
 }
 
 create dataset MyData(MyRecord)
-  partitioned by key id;
+  primary key id;
 
 load dataset MyData 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
@@ -25,7 +25,7 @@
 
 
 create dataset MyMiniData(MyMiniRecord)
-  partitioned by key id;
+  primary key id;
 
 load dataset MyMiniData 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert.aql
index a0ce842..0309b7c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/failure/insert.aql
@@ -29,10 +29,10 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 create dataset LineID(LineIDType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/q1_pricing_summary_report_failure.aql b/asterix-app/src/test/resources/runtimets/queries/failure/q1_pricing_summary_report_failure.aql
index a0c7f87..58db4b8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/failure/q1_pricing_summary_report_failure.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/failure/q1_pricing_summary_report_failure.aql
@@ -24,7 +24,7 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01.aql
index d4dcd38..fe0e9e5 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01.aql
@@ -17,8 +17,8 @@
 
 create feed dataset TweetFeed(TweetType)
 using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
-(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"))
-partitioned by key id;
+(("output-type-name"="TweetType"),("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("tuple-interval"="10"))
+primary key id;
 
 write output to nc1:"rttest/feeds_feeds_01.adm";
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02.aql
index 3129d63..d72e623 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02.aql
@@ -19,7 +19,7 @@
 create feed dataset TweetFeed(TweetType)
 using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
 (("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"))
-partitioned by key id;
+primary key id;
 
 begin feed TweetFeed; 
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03.aql
index a4b22d0..615ee83 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03.aql
@@ -21,9 +21,9 @@
 
 create feed dataset TweetFeed(TweetType)
 using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
-(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+(("output-type-name"="TweetType"),("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("tuple-interval"="10"))
 apply function feed_processor@1
-partitioned by key id;
+primary key id;
 
 write output to nc1:"rttest/feeds_feeds_03.adm";
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04.aql
index c38cfd2..bea88fb 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04.aql
@@ -20,7 +20,7 @@
 create feed dataset TweetFeed(TweetType)
 using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
 (("fs"="hdfs"),("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/obamatweets.adm"),("format"="adm"),("input-format"="text-input-format"),("output-type-name"="TweetType"),("tuple-interval"="10"))
-partitioned by key id;
+primary key id;
 
 begin feed TweetFeed; 
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds.aql
index a7dc4fa..a9f3236 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds.aql
@@ -19,7 +19,7 @@
 create feed dataset TweetFeed(TweetType)
 using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
 (("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"))
-partitioned by key id;
+primary key id;
 
 begin feed feeds.TweetFeed; 
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_1.aql
index 8438ae9..ffabe64 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_1.aql
@@ -12,7 +12,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.1.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.1.1.aql
index d575424..a74bc3e 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.1.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.1.1.aql
@@ -12,7 +12,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.1.aql
index f0b43d1..53771c1 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.1.aql
@@ -12,7 +12,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.aql
index a96e933..923959b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.aql
@@ -12,7 +12,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2.1_5.3.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2.1_5.3.1.aql
index f424166..3c7bb5b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2.1_5.3.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2.1_5.3.1.aql
@@ -12,7 +12,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2.2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2.2.aql
index 05304c5..d0eebcb 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2.2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2.2.aql
@@ -17,8 +17,8 @@
   rank: int32
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset TOKENSRANKEDADM(TOKENSRANKEDADMType) partitioned by key rank;
+create dataset DBLP(DBLPType) primary key id;
+create dataset TOKENSRANKEDADM(TOKENSRANKEDADMType) primary key rank;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_1.aql
index be6073b..b5ae4c6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_1.aql
@@ -12,7 +12,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_2.aql
index 9d31f4a..d50b67c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_2.aql
@@ -12,7 +12,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_3.aql
index 4bc6836..2e57a79 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_3.aql
@@ -12,7 +12,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_4.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_4.aql
index 149a9b4..7f59c30 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_4.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_4.aql
@@ -12,7 +12,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.1.aql
index 90e1b15..4f53db6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.1.aql
@@ -12,7 +12,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.2.aql
index 2771589..67bd952 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.2.aql
@@ -12,7 +12,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.3.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.3.1.aql
index 57cdab4..9944c93 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.3.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.3.1.aql
@@ -12,7 +12,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.3.aql
index e4e8cfd..0319117 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.3.aql
@@ -12,7 +12,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.aql
index 8d3c4c3..9a10925 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.aql
@@ -12,7 +12,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.1.aql
index e65952d..2de6807 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.1.aql
@@ -12,7 +12,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.2.aql
index 4a98706..a3b5bc5 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.2.aql
@@ -12,7 +12,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.aql
index d998928..26f5e46 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.aql
@@ -12,7 +12,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-aqlplus_1.aql
index fa0bfac..dd88883 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-aqlplus_1.aql
@@ -12,7 +12,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-aqlplus_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-aqlplus_2.aql
index 7bf7d5f..abd3425 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-aqlplus_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-aqlplus_2.aql
@@ -15,7 +15,7 @@
   id: int32
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_1.aql
index a899a80..288194c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_1.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_2.aql
index 4c30bb7..0010b8b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_2.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_3.aql
index 5c49fd2..aa3c661 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_3.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_4.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_4.aql
index fa8501c..2aa331c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_4.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_4.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.1.aql
index 97d163f..a8f5eec 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.1.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.2.aql
index 0329a87..d724b57 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.2.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.3.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.3.1.aql
index 9d9c141..57dbb53 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.3.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.3.1.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 
 load dataset DBLP
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.3.aql
index 11d4327..fa9f11c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.3.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.aql
index 603077e..40fd306 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_1.aql
index 2a3f6ec..59a881b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_1.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_2.aql
index e942aa0..9352c3b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_2.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_3.aql
index dbbc1b1..c1c6322 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_3.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_4.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_4.aql
index a703037..75fe12b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_4.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_4.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.1.aql
index c6d61e2..bfed5c6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.1.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.2.aql
index f881151..b683acb 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.2.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.3.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.3.1.aql
index b0884f2..3134c41 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.3.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.3.1.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.3.aql
index 46bd31d..97e16c9 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.3.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.4.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.4.1.aql
index c8ed34b..175e4b1 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.4.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.4.1.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.4.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.4.aql
index e4eab9d..e471d69 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.4.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.4.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.aql
index 55d508b..588591d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_1.aql
index 30445bf..0195185 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_1.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_2.aql
index 01ee281..085d139 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_2.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_3.aql
index 4439af1..d379cc4 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_3.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-dblp-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-dblp-aqlplus_1.aql
index 77e32a2..cb774be 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-dblp-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-dblp-aqlplus_1.aql
@@ -20,8 +20,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-lookup_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-lookup_1.aql
index 274f0c2..edec926 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-lookup_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-lookup_1.aql
@@ -12,7 +12,7 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
 load dataset DBLP
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/events-users-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/events-users-aqlplus_1.aql
index 500454f..b31ba58 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/events-users-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/events-users-aqlplus_1.aql
@@ -22,7 +22,7 @@
 }}
 }
 
-create dataset User(UserType) partitioned by key name;
+create dataset User(UserType) primary key name;
 
 load dataset User
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_1.aql
index 7598987..af327b4 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_1.aql
@@ -9,7 +9,7 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
+create dataset Users(UserType) primary key uid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_2.aql
index 5b3c828..2d81ddd 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_2.aql
@@ -9,7 +9,7 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
+create dataset Users(UserType) primary key uid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_3.aql
index c131c75..9785f52 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_3.aql
@@ -9,7 +9,7 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
+create dataset Users(UserType) primary key uid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_1.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_1.1.aql
index 6a6248f..bdcaa36 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_1.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_1.1.aql
@@ -9,7 +9,7 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
+create dataset Users(UserType) primary key uid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_1.aql
index 75eb9c8..db833d0 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_1.aql
@@ -9,7 +9,7 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
+create dataset Users(UserType) primary key uid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_2.aql
index 39168c6..9f6e758 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_2.aql
@@ -9,7 +9,7 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
+create dataset Users(UserType) primary key uid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_3.aql
index c9e4ece..e438ec8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_3.aql
@@ -9,7 +9,7 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
+create dataset Users(UserType) primary key uid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-3_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-3_1.aql
index fa63e8e..6dd9cbc 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-3_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-3_1.aql
@@ -18,8 +18,8 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_1.aql
index 5cea93e..40fa07b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_1.aql
@@ -18,8 +18,8 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_2.aql
index 6ec3fc6..e56c48c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_2.aql
@@ -18,8 +18,8 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_3.aql
index baf6d12..01c172e 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_3.aql
@@ -18,8 +18,8 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-vis-user-lot-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-vis-user-lot-aqlplus_1.aql
index ff94813..0746be5 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-vis-user-lot-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-vis-user-lot-aqlplus_1.aql
@@ -18,8 +18,8 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-3_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-3_1.aql
index 9f2fd07..6654d07 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-3_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-3_1.aql
@@ -18,8 +18,8 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_1.aql
index 1cfaccf..a289015 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_1.aql
@@ -18,8 +18,8 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_2.aql
index b5eb560..59b7c9a 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_2.aql
@@ -18,8 +18,8 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_3.aql
index 5b557ee..861db2f 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_3.aql
@@ -18,8 +18,8 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_4.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_4.aql
index 1353960..49ac082 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_4.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_4.aql
@@ -18,8 +18,8 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_5.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_5.aql
index 58cc81f..08e8bf8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_5.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_5.aql
@@ -18,8 +18,8 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-int-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-int-aqlplus_1.aql
index 01ad330..4679eca 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-int-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-int-aqlplus_1.aql
@@ -18,8 +18,8 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-int-aqlplus_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-int-aqlplus_2.aql
index 35fde70..669284c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-int-aqlplus_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-int-aqlplus_2.aql
@@ -18,8 +18,8 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
 
 load dataset Users 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_02.aql b/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_02.aql
index 7a0494f..8271f1d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_02.aql
@@ -13,7 +13,7 @@
 };
 
 create external dataset TextDataset(LineType)
-using "hdfs"
+using hdfs
 (("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/textFileS"),("input-format"="sequence-input-format"),("format"="delimited-text"),("delimiter"="."));
 
 write output to nc1:"rttest/hdfs_hdfs_02.adm";
diff --git a/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_03.aql b/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_03.aql
index fc5b3ab..4ddf511 100644
--- a/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_03.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_03.aql
@@ -15,7 +15,7 @@
 };
 
 create external dataset TextDataset(LineType)
-using "hdfs"
+using hdfs
 (("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/large_text"),("input-format"="text-input-format"),("format"="delimited-text"),("delimiter"="."));
 
 write output to nc1:"rttest/hdfs_hdfs_03.adm";
diff --git a/asterix-app/src/test/resources/runtimets/queries/hdfs/issue_245_hdfs.aql b/asterix-app/src/test/resources/runtimets/queries/hdfs/issue_245_hdfs.aql
index c2a0963..4a42a20 100644
--- a/asterix-app/src/test/resources/runtimets/queries/hdfs/issue_245_hdfs.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/hdfs/issue_245_hdfs.aql
@@ -14,7 +14,7 @@
 };
 
 create external dataset TextDataset(LineType)
-using "hdfs"
+using hdfs
 (("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/asterix_info.txt"),("input-format"="text-input-format"),("format"="delimited-text"),("delimiter"="."));
 
 write output to nc1:"rttest/hdfs_issue_245_hdfs.adm";
diff --git a/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_5.aql b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_5.aql
new file mode 100644
index 0000000..544b37c
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_5.aql
@@ -0,0 +1,35 @@
+/*
+* Description  : Create a dataset and load it from two file splits 
+                 Use hint (cardinality) for the created dataset.
+* Expected Res : Success
+* Date         : 30th Jan 2013
+*/
+
+/* scan and print an ADM file as a dataset of closed records */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type DBLPType as closed {
+  id: int32, 
+  dblpid: string,
+  title: string,
+  authors: string,
+  misc: string
+}
+
+create dataset DBLPadm(DBLPType) 
+primary key id
+hints(cardinality=200);
+
+// drop dataset DBLPadm;
+load dataset DBLPadm 
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter" 
+(("path"="nc1://data/dblp-small/part-00000.adm,nc1://data/dblp-small/part-00001.adm"),("format"="adm"));
+
+write output to nc1:"rttest/hints_issue_251_dataset_hint_5.adm";
+
+for $paper in dataset('DBLPadm')
+order by $paper.id
+return $paper
diff --git a/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_6.aql b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_6.aql
new file mode 100644
index 0000000..0b7c20d
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_6.aql
@@ -0,0 +1,28 @@
+/*
+* Description  : Create an external dataset that contains tuples, the lines from a (*sequence*) file in HDFS.
+                 Provide hint(cardinality) when creating the dataset.
+                 Perform a word-count over the data in the dataset.
+* Expected Res : Success
+* Date         : 30th Jan 2013
+*/
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as closed {
+ content: string
+};
+
+create external dataset TextDataset(LineType)
+using hdfs
+(("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/textFileS"),("input-format"="sequence-input-format"),("format"="delimited-text"),("delimiter"="."))
+hints(cardinality=10);
+
+write output to nc1:"rttest/hints_issue_251_dataset_hint_6.adm";
+
+for $line in dataset('TextDataset')
+let $tokens := word-tokens($line.content)
+for $token in $tokens
+group by $tok := $token with $token
+order by $tok
+return { "word": $tok, "count": count($token) }
diff --git a/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7.aql b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7.aql
new file mode 100644
index 0000000..8f4a74d
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7.aql
@@ -0,0 +1,33 @@
+/*
+ * Description  : Create a feed dataset that uses the feed simulator adapter. 
+                  Use hint (cardinality) for the feed dataset.
+                  Begin ingestion using a fully qualified name and verify contents of the dataset post completion.  
+ * Expected Res : Success
+ * Date         : 30th Jan 2013
+ */
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TweetType as closed {
+  id: string,
+  username : string,
+  location : string,
+  text : string,
+  timestamp : string
+}      
+
+create feed dataset TweetFeed(TweetType)
+using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+primary key id
+hints(cardinality=200);
+
+begin feed feeds.TweetFeed; 
+
+write output to nc1:"rttest/hints_issue_251_dataset_hint_7.adm";
+
+for $x in dataset('TweetFeed')
+return $x
+
+drop dataverse feeds;
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-join/btree-primary-equi-join.aql b/asterix-app/src/test/resources/runtimets/queries/index-join/btree-primary-equi-join.aql
index 1015e82..0800784 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-join/btree-primary-equi-join.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-join/btree-primary-equi-join.aql
@@ -37,8 +37,8 @@
   items: [int32]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
-create dataset Orders(OrderType) partitioned by key oid;
+create dataset Customers(CustomerType) primary key cid;
+create dataset Orders(OrderType) primary key oid;
 
 load dataset Customers
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-join/btree-secondary-equi-join.aql b/asterix-app/src/test/resources/runtimets/queries/index-join/btree-secondary-equi-join.aql
index d1e9824..1bf3490 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-join/btree-secondary-equi-join.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-join/btree-secondary-equi-join.aql
@@ -25,8 +25,8 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-join/rtree-spatial-intersect-point.aql b/asterix-app/src/test/resources/runtimets/queries/index-join/rtree-spatial-intersect-point.aql
index ab79189..58fbf73 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-join/rtree-spatial-intersect-point.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-join/rtree-spatial-intersect-point.aql
@@ -21,8 +21,8 @@
   circle: circle
 }
 
-create dataset MyData1(MyRecord) partitioned by key id;
-create dataset MyData2(MyRecord) partitioned by key id;
+create dataset MyData1(MyRecord) primary key id;
+create dataset MyData2(MyRecord) primary key id;
 
 load dataset MyData1
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-composite-key.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-composite-key.aql
index dadb884..248c8ba 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-composite-key.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-composite-key.aql
@@ -19,7 +19,7 @@
 dept:string
 }
 
-create dataset employee(Emp) partitioned by key id;
+create dataset employee(Emp) primary key id;
 
 load dataset employee
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-rewrite-multiple.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-rewrite-multiple.aql
index 7b72a80..e3214de 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-rewrite-multiple.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-rewrite-multiple.aql
@@ -22,7 +22,7 @@
   o_comment: string
 }
 
-create dataset Orders(OrderType) partitioned by key o_orderkey;
+create dataset Orders(OrderType) primary key o_orderkey;
 
 load dataset Orders 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/cust-index-age-nullable.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/cust-index-age-nullable.aql
index 460a212..8fb416b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/cust-index-age-nullable.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/cust-index-age-nullable.aql
@@ -17,7 +17,7 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-contains.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-contains.aql
index ad85be7..e9b9540 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-contains.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-contains.aql
@@ -13,7 +13,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-edit-distance-panic.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-edit-distance-panic.aql
index b275423..3e637ed 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-edit-distance-panic.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-edit-distance-panic.aql
@@ -13,7 +13,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-edit-distance.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-edit-distance.aql
index ddcdd4b..1fca0d4 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-edit-distance.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-edit-distance.aql
@@ -13,7 +13,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-jaccard.aql
index 501ebce..1b8167b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-jaccard.aql
@@ -13,7 +13,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-edit-distance-panic.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-edit-distance-panic.aql
index deb51a7..79e728f 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-edit-distance-panic.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-edit-distance-panic.aql
@@ -20,7 +20,7 @@
 create nodegroup group1 if not exists on nc1;
 
 create dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-edit-distance.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-edit-distance.aql
index bb05fc1..793d578 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-edit-distance.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-edit-distance.aql
@@ -20,7 +20,7 @@
 create nodegroup group1 if not exists on nc1;
 
 create dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-jaccard.aql
index 8e2d1e7..aacb656 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-jaccard.aql
@@ -20,7 +20,7 @@
 create nodegroup group1 if not exists on nc1;
 
 create dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ulist-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ulist-jaccard.aql
index 6a0e266..a6ea348 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ulist-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ulist-jaccard.aql
@@ -20,7 +20,7 @@
 create nodegroup group1 if not exists on nc1;
 
 create dataset Customers(CustomerType) 
-  partitioned by key cid on group1;
+  primary key cid on group1;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-word-contains.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-word-contains.aql
index 348f686..f873dbb 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-word-contains.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-word-contains.aql
@@ -13,7 +13,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-word-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-word-jaccard.aql
index 9852e67..051f4f8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-word-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-word-jaccard.aql
@@ -13,7 +13,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-conjunctive-open.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-conjunctive-open.aql
index 1a0ecbc..174c50f 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-conjunctive-open.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-conjunctive-open.aql
@@ -15,7 +15,7 @@
 }
 
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 
 load dataset Orders 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter" 
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-conjunctive.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-conjunctive.aql
index ceca42e..6e4e659 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-conjunctive.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-conjunctive.aql
@@ -15,7 +15,7 @@
 }
 
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 
 load dataset Orders 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter" 
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-open.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-open.aql
index 281f566..50088f4 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-open.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-open.aql
@@ -18,7 +18,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 
 load dataset Orders 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey.aql
index 365cfcb..9a950aa 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey.aql
@@ -15,7 +15,7 @@
 }
 
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 
 load dataset Orders 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/range-search-open.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/range-search-open.aql
index 099e2d2..9a1906c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/range-search-open.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/range-search-open.aql
@@ -23,7 +23,7 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter" 
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/range-search.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/range-search.aql
index 62714ed..758e110 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/range-search.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/range-search.aql
@@ -23,7 +23,7 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter" 
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-nullable.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-nullable.aql
index 6ca6d28..d920cfce 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-nullable.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-nullable.aql
@@ -14,7 +14,7 @@
 }
 
 create dataset MyData(MyRecord)
-  partitioned by key id;
+  primary key id;
 
 load dataset MyData 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter" 
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-open.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-open.aql
index 44425cc..0a49ac1 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-open.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-open.aql
@@ -14,7 +14,7 @@
 }
 
 create dataset MyData(MyRecord)
-  partitioned by key id;
+  primary key id;
 
 load dataset MyData 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter" 
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index.aql
index c1e1890..19df2db 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index.aql
@@ -15,7 +15,7 @@
 }
 
 create dataset MyData(MyRecord)
-  partitioned by key id;
+  primary key id;
 
 load dataset MyData 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter" 
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-edit-distance-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-edit-distance-inline.aql
index 0ea267c..c6e29e6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-edit-distance-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-edit-distance-inline.aql
@@ -25,9 +25,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-edit-distance.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-edit-distance.aql
index f7e3a8b..46cd05c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-edit-distance.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-edit-distance.aql
@@ -24,9 +24,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-jaccard-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-jaccard-inline.aql
index 734a269..f2f330e 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-jaccard-inline.aql
@@ -26,9 +26,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-jaccard.aql
index 2e1a635..1df7a5b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-jaccard.aql
@@ -25,9 +25,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-edit-distance-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-edit-distance-inline.aql
index 3b46c7d..a72d292 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-edit-distance-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-edit-distance-inline.aql
@@ -25,9 +25,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-edit-distance.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-edit-distance.aql
index 3f025ed..ea05e33 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-edit-distance.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-edit-distance.aql
@@ -24,9 +24,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-jaccard-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-jaccard-inline.aql
index ea28721..e8b4b05 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-jaccard-inline.aql
@@ -25,9 +25,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-jaccard.aql
index 458d31c..d33fc87 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-jaccard.aql
@@ -24,9 +24,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ulist-jaccard-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ulist-jaccard-inline.aql
index e11b2f0..c46d534 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ulist-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ulist-jaccard-inline.aql
@@ -25,9 +25,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ulist-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ulist-jaccard.aql
index 9732a51..c7534f3 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ulist-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ulist-jaccard.aql
@@ -24,9 +24,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/word-jaccard-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/word-jaccard-inline.aql
index 1985878..ebec1d8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/word-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/word-jaccard-inline.aql
@@ -26,9 +26,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/word-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/word-jaccard.aql
index 013b51e..29e17bc 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/word-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/word-jaccard.aql
@@ -25,9 +25,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-edit-distance-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-edit-distance-inline.aql
index a602ca1..75ebe7b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-edit-distance-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-edit-distance-inline.aql
@@ -24,9 +24,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-edit-distance.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-edit-distance.aql
index 1c88536..9c305ec 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-edit-distance.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-edit-distance.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-jaccard-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-jaccard-inline.aql
index cd88072..048aee8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-jaccard-inline.aql
@@ -25,9 +25,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-jaccard.aql
index abb5e33..96c685d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-jaccard.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-edit-distance-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-edit-distance-inline.aql
index bdac6f1..41e1d9e 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-edit-distance-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-edit-distance-inline.aql
@@ -24,9 +24,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-edit-distance.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-edit-distance.aql
index 5e679e4..64e16ba 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-edit-distance.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-edit-distance.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
 
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-jaccard-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-jaccard-inline.aql
index 8fd8632..ef2730b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-jaccard-inline.aql
@@ -24,9 +24,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-jaccard.aql
index 50d13f1..d8af52c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-jaccard.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ulist-jaccard-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ulist-jaccard-inline.aql
index a62c66d..c6b2252 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ulist-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ulist-jaccard-inline.aql
@@ -24,9 +24,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ulist-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ulist-jaccard.aql
index 8c6570f..efdb925 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ulist-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ulist-jaccard.aql
@@ -23,9 +23,9 @@
   children: [ { name: string, age: int32? } ]
 }
 
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
   
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
 
 load dataset Customers 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/word-jaccard-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/word-jaccard-inline.aql
index 3ac3583..b50a92b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/word-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/word-jaccard-inline.aql
@@ -25,9 +25,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/word-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/word-jaccard.aql
index 7060fe6..6063e0b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/word-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/word-jaccard.aql
@@ -24,9 +24,9 @@
   misc: string
 }
 
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
 
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/load/issue14_query.aql b/asterix-app/src/test/resources/runtimets/queries/load/issue14_query.aql
index 9b25210..2127117 100644
--- a/asterix-app/src/test/resources/runtimets/queries/load/issue14_query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/load/issue14_query.aql
@@ -14,10 +14,10 @@
 }
 
 create dataset onektup(Schema) 
-partitioned by key id;
+primary key id;
 
 load dataset onektup 
-using "localfs"(("path"="nc1:///tmp/one.adm"));
+using localfs(("path"="nc1:///tmp/one.adm"));
 
 write output to nc1:"/tmp/foo.adm";
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/misc/nested-loop-join_01.aql b/asterix-app/src/test/resources/runtimets/queries/misc/nested-loop-join_01.aql
index f148d2e..a2e2c1d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/misc/nested-loop-join_01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/misc/nested-loop-join_01.aql
@@ -16,8 +16,8 @@
   interests: {{string}}
 }
 
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
 
 
 load dataset Users 
diff --git a/asterix-app/src/test/resources/runtimets/queries/nestrecords/nestrecord.aql b/asterix-app/src/test/resources/runtimets/queries/nestrecords/nestrecord.aql
index c0b5e06..e038aeb 100644
--- a/asterix-app/src/test/resources/runtimets/queries/nestrecords/nestrecord.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/nestrecords/nestrecord.aql
@@ -20,7 +20,7 @@
   address: AddressType?
 }
 
-create dataset testds(testtype) partitioned by key id;
+create dataset testds(testtype) primary key id;
 
 insert into dataset testds (
 { "id": "001", "name": "Person One", "address": {"street": "3019 DBH",  "city": "Irvine", "zip": 92697} }
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c-w-optional.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c-w-optional.aql
index 354757d..c5fb80c1 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c-w-optional.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c-w-optional.aql
@@ -18,9 +18,9 @@
 optnl_fld:string?
 }
 
-create dataset T1(TestType) partitioned by key id;
+create dataset T1(TestType) primary key id;
 
-create dataset T2(TestType) partitioned by key id;
+create dataset T2(TestType) primary key id;
 
 insert into dataset T1({
 "id":1234,
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c-wo-optional.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c-wo-optional.aql
index 9d4b90a..b31fe88 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c-wo-optional.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c-wo-optional.aql
@@ -19,9 +19,9 @@
 optnl_fld:string?
 }
 
-create dataset T1(TestType) partitioned by key id;
+create dataset T1(TestType) primary key id;
 
-create dataset T2(TestType) partitioned by key id;
+create dataset T2(TestType) primary key id;
 
 insert into dataset T1({
 "id":1234,
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c.aql
index 83888f3..9f923e7 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c.aql
@@ -17,10 +17,10 @@
 }
 
 // source dataset
-create dataset T1(TestType) partitioned by key id;
+create dataset T1(TestType) primary key id;
 
 // target dataset
-create dataset T2(TestType) partitioned by key id;
+create dataset T2(TestType) primary key id;
 
 insert into dataset T1({
 "id":1234,
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list-ordered01.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list-ordered01.aql
index 27935e0..b51c1fe 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list-ordered01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list-ordered01.aql
@@ -22,7 +22,7 @@
 batters:[[BatterType]]
 }
 
-create dataset T1(TestType) partitioned by key id;
+create dataset T1(TestType) primary key id;
 
 insert into dataset T1({
 "id":1234,
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list01.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list01.aql
index 7ba1641..82823ce 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list01.aql
@@ -22,7 +22,7 @@
 batters:{{BatterType}}
 }
 
-create dataset T1(TestType) partitioned by key id;
+create dataset T1(TestType) primary key id;
 
 insert into dataset T1({
 "id":1234,
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list02.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list02.aql
index 47dd921..45e4bac 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list02.aql
@@ -22,7 +22,7 @@
 batters:[[BatterType]]
 }
 
-create dataset T1(TestType) partitioned by key id;
+create dataset T1(TestType) primary key id;
 
 insert into dataset T1({
 "id":1234,
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list03.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list03.aql
index 94a87c0..94faa44 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list03.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list03.aql
@@ -23,7 +23,7 @@
 batters:[[BatterType]]?
 }
 
-create dataset T1(TestType) partitioned by key id;
+create dataset T1(TestType) primary key id;
 
 insert into dataset T1({
 "id":1234,
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-01.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-01.aql
index 0d26b5d..55d0497 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-01.aql
@@ -16,7 +16,7 @@
 name : string
 }
 
-create dataset testds(testType) partitioned by key id;
+create dataset testds(testType) primary key id;
 
 
 insert into dataset testds({"id": 123, "name": "John Doe", "hobbies": {{ "scuba", "music" }} }
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-12.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-12.aql
index ae21794..087ca36 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-12.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-12.aql
@@ -22,9 +22,9 @@
 name : string
 }
 
-create dataset testds01(testtype01) partitioned by key id;
+create dataset testds01(testtype01) primary key id;
 
-create dataset testds02(testtype02) partitioned by key id;
+create dataset testds02(testtype02) primary key id;
 
 insert into dataset testds02 (
 { "id": "001", "name": "Person One", "hobbies": {{"scuba", "music"}}}
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-14.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-14.aql
index 6c517ca..7038437 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-14.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-14.aql
@@ -19,9 +19,9 @@
   id: string  
 }
 
-create dataset testds01(testtype01) partitioned by key id;
+create dataset testds01(testtype01) primary key id;
 
-create dataset testds02(testtype02) partitioned by key id;
+create dataset testds02(testtype02) primary key id;
 
 insert into dataset testds01 (
 { "id": "001" }
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-15.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-15.aql
index 2ac2567..d3547e2 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-15.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-15.aql
@@ -26,7 +26,7 @@
 lat_lon: point
 }
 
-create dataset tdtst(Schema) partitioned by key id_32; 
+create dataset tdtst(Schema) primary key id_32; 
 
 insert into dataset tdtst(
 let $f1:=time("10:50:56:200+05:00")
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-16.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-16.aql
index 5e1a591..d07fce0 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-16.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-16.aql
@@ -26,7 +26,7 @@
 lat_lon: point
 }
 
-create dataset tdtst(Schema) partitioned by key id_32; 
+create dataset tdtst(Schema) primary key id_32; 
 
 insert into dataset tdtst(
 let $f1:=time("10:50:56:200+05:00")
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-17.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-17.aql
index a590cf1..da83e0f 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-17.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-17.aql
@@ -22,7 +22,7 @@
 lat_lon: point
 }
 
-create dataset tdtst(Schema) partitioned by key id_32; 
+create dataset tdtst(Schema) primary key id_32; 
 
 insert into dataset tdtst(
 let $f1:=time("10:50:56:200+05:00")
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-19.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-19.aql
index b406663..1d93992 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-19.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-19.aql
@@ -15,7 +15,7 @@
 id:int32
 }
 
-create dataset dtst01(TestType) partitioned by key id;
+create dataset dtst01(TestType) primary key id;
 
 insert into dtst01({"id":137});
 insert into dtst01({"id":117});
@@ -30,7 +30,7 @@
 dob:date
 }
 
-create dataset employee(Emp) partitioned by key id;
+create dataset employee(Emp) primary key id;
 
 insert into dataset employee({"id":201,"name":"John Doe","age":32,"sex":"M","dob":"1-1-82"});
 insert into dataset employee({"id":202,"name":"John Smith","age":30,"sex":"M","dob":"1-1-82"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-20.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-20.aql
index 29b4366..7f1be27 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-20.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-20.aql
@@ -16,7 +16,7 @@
 id:int32
 }
 
-create dataset dtst01(TestType) partitioned by key id;
+create dataset dtst01(TestType) primary key id;
 
 create type Emp as open {
 id:int32,
@@ -26,7 +26,7 @@
 dob:date?
 }
 
-create dataset employee(Emp) partitioned by key id;
+create dataset employee(Emp) primary key id;
 
 insert into dataset employee({"id":201,"name":"John Doe","age":32,"sex":"M","dob":date("1975-01-11")});
 insert into dataset employee({"id":202,"name":"John Smith","age":30,date("1982-05-23")});
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-21.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-21.aql
index c52344e..3d955c8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-21.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-21.aql
@@ -14,7 +14,7 @@
 id:int32
 }
 
-create dataset dtst01(TestType) partitioned by key id;
+create dataset dtst01(TestType) primary key id;
 
 create type Emp as open {
 id:int32,
@@ -24,7 +24,7 @@
 dob:date
 }
 
-create dataset employee(Emp) partitioned by key id;
+create dataset employee(Emp) primary key id;
 
 insert into dataset employee({"id":201,"name":"John Doe","age":32,"sex":"M","dob":date("1975-01-11")});
 insert into dataset employee({"id":202,"name":"John Smith","age":30,"sex":"M","dob":date("1982-07-12")});
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-22.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-22.aql
index cd9ac44..0b4c31f 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-22.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-22.aql
@@ -20,7 +20,7 @@
 dob:date?
 }
 
-create dataset employee(Emp) partitioned by key id;
+create dataset employee(Emp) primary key id;
 
 //date("YYYY-MM-DD")
 insert into dataset employee({"id":201,"name":"John Doe","age":37,"dept":"HR","sex":"M","dob":date("1975-11-02")});
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-24.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-24.aql
index 7012a5a..4123cf2 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-24.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-24.aql
@@ -17,7 +17,7 @@
 opt_tag : {{ string }}
 }
 
-create dataset testds(testType) partitioned by key id;
+create dataset testds(testType) primary key id;
 
 insert into dataset testds({"id": 32,"name": "UCI","opt_tag":{{"optional text","put any text here","and more"}}});
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-25.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-25.aql
index b29b63a..ec0d4cd 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-25.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-25.aql
@@ -17,7 +17,7 @@
 opt_tag : {{ string }}?
 }
 
-create dataset testds(testType) partitioned by key id;
+create dataset testds(testType) primary key id;
 
 insert into dataset testds({"id": 32,"name": "UCI","opt_tag":{{"optional text","put any text here","and more"}}});
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-26.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-26.aql
index f19e4a7..486b76e 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-26.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-26.aql
@@ -18,7 +18,7 @@
 opt_tag : {{ string }}?
 }
 
-create dataset testds(testType) partitioned by key id;
+create dataset testds(testType) primary key id;
 
 insert into dataset testds({"id": 32,"name": "UCI"});
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-28.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-28.aql
index 8fdab75..acef21e 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-28.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-28.aql
@@ -20,9 +20,9 @@
 name : string
 }
 
-create dataset testds01(testtype01) partitioned by key id;
+create dataset testds01(testtype01) primary key id;
 
-create dataset testds02(testtype02) partitioned by key id;
+create dataset testds02(testtype02) primary key id;
 
 insert into dataset testds02 (
 { "id": "001", "name": "Person One", "hobbies": {{"scuba", "music"}}}
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-29.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-29.aql
index 9560430..5047342 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-29.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-29.aql
@@ -20,9 +20,9 @@
 name : string
 }
 
-create dataset testds01(testtype01) partitioned by key id;
+create dataset testds01(testtype01) primary key id;
 
-create dataset testds02(testtype02) partitioned by key id;
+create dataset testds02(testtype02) primary key id;
 
 insert into dataset testds02 (
 { "id": "001", "name": "Person One", "hobbies": {{"scuba", "music"}}}
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-30.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-30.aql
index 42aa2e6..e52be74 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-30.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-30.aql
@@ -19,9 +19,9 @@
 name : string
 }
 
-create dataset testds01(testtype01) partitioned by key id;
+create dataset testds01(testtype01) primary key id;
 
-create dataset testds02(testtype02) partitioned by key id;
+create dataset testds02(testtype02) primary key id;
 
 insert into dataset testds02 (
 { "id": "011", "name": "John Doe", "hobbies": {{"scuba", "music"}}}
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-31.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-31.aql
index 03b1754..5d5fa33 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-31.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-31.aql
@@ -18,9 +18,9 @@
 name : string
 }
 
-create dataset testds01(testtype01) partitioned by key id;
+create dataset testds01(testtype01) primary key id;
 
-create dataset testds02(testtype02) partitioned by key id;
+create dataset testds02(testtype02) primary key id;
 
 insert into dataset testds02 (
 { "id": "011", "name": "John Doe", "hobbies": {{"scuba", "music"}}}
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-32.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-32.aql
index f101e47..a79c53d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-32.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-32.aql
@@ -23,9 +23,9 @@
 interests : {{string}}
 }
 
-create dataset testds01(testtype01) partitioned by key id;
+create dataset testds01(testtype01) primary key id;
 
-create dataset testds02(testtype02) partitioned by key id;
+create dataset testds02(testtype02) primary key id;
 
 insert into dataset testds02 (
 { "id": "011", "name": "John Doe", "sex":"Male", "dept":"HR", "salary":80000,"interests":{{"hiking","scuba","painting","biking"}}});
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-33.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-33.aql
index b75e6c6..809c93e 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-33.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-33.aql
@@ -24,9 +24,9 @@
 interests : {{string}}?
 }
 
-create dataset testds01(testtype01) partitioned by key id;
+create dataset testds01(testtype01) primary key id;
 
-create dataset testds02(testtype02) partitioned by key id;
+create dataset testds02(testtype02) primary key id;
 
 insert into dataset testds02 (
 { "id": "011", "name": "John Doe", "sex":"Male", "dept":"HR", "salary":80000,"interests":{{"hiking","scuba","painting","biking"}}});
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue208.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue208.aql
index e46286c..2094b4a 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue208.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue208.aql
@@ -26,7 +26,7 @@
 }
 
 create dataset TweetMessages(TweetMessageType)
-partitioned by key tweetid;
+primary key tweetid;
 
 load dataset TweetMessages
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue236.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue236.aql
index 3d8ab69..7e859af 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue236.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue236.aql
@@ -31,7 +31,7 @@
 }
 
 create dataset TweetMessages(TweetMessageType)
-partitioned by key tweetid;
+primary key tweetid;
 
 
 insert into dataset TweetMessages(
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-proposal.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-proposal.aql
index a8d00f8..2668430 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-proposal.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-proposal.aql
@@ -26,7 +26,7 @@
 };
 
 create dataset TweetMessages(TweetMessageType)
-partitioned by key tweetid;
+primary key tweetid;
 
 insert into dataset TweetMessages(
    {
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-proposal02.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-proposal02.aql
index 36feac4..b3945a0 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-proposal02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-proposal02.aql
@@ -27,7 +27,7 @@
 };
 
 create dataset TweetMessages(TweetMessageType)
-partitioned by key tweetid;
+primary key tweetid;
 
 insert into dataset TweetMessages(
    {
diff --git a/asterix-app/src/test/resources/runtimets/queries/quantifiers/somesat_02.aql b/asterix-app/src/test/resources/runtimets/queries/quantifiers/somesat_02.aql
index 18f1c20..6718ff0 100644
--- a/asterix-app/src/test/resources/runtimets/queries/quantifiers/somesat_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/quantifiers/somesat_02.aql
@@ -30,9 +30,9 @@
 }
 
 create dataset CustomerSomeSat02(CustomerType)
-  partitioned by key cid;
+  primary key cid;
 create dataset OrdersSomeSat02(OrderType)
-  partitioned by key oid;
+  primary key oid;
 
 load dataset CustomerSomeSat02 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter" 
diff --git a/asterix-app/src/test/resources/runtimets/queries/records/closed-closed-fieldname-conflict_issue173.aql b/asterix-app/src/test/resources/runtimets/queries/records/closed-closed-fieldname-conflict_issue173.aql
new file mode 100644
index 0000000..b356a56
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/records/closed-closed-fieldname-conflict_issue173.aql
@@ -0,0 +1,11 @@
+/*
+ * Description    : Tests whether a conflict between two closed field names is detected
+ * Expected Result: An error reporting that there is a duplicate field name "name"
+ * Author: zheilbron
+ */
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+let $x := {"name": "john", "name": "smith"}
+return $x
diff --git a/asterix-app/src/test/resources/runtimets/queries/records/field-access-on-open-field.aql b/asterix-app/src/test/resources/runtimets/queries/records/field-access-on-open-field.aql
index 2592c67..1b7cf02 100644
--- a/asterix-app/src/test/resources/runtimets/queries/records/field-access-on-open-field.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/records/field-access-on-open-field.aql
@@ -13,7 +13,7 @@
   name : string
 }
 
-create dataset testds(TestType) partitioned by key id;
+create dataset testds(TestType) primary key id;
 
 insert into dataset testds({"id": 123, "name": "John Doe", "address": { "zip": 92617} });
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/records/open-closed-fieldname-conflict_issue173.aql b/asterix-app/src/test/resources/runtimets/queries/records/open-closed-fieldname-conflict_issue173.aql
new file mode 100644
index 0000000..a5ac400
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/records/open-closed-fieldname-conflict_issue173.aql
@@ -0,0 +1,21 @@
+/*
+ * Description    : Tests whether a conflict between an open and a closed field name is detected
+ * Expected Result: An error reporting that there is a duplicate field name "name"
+ * Author: zheilbron
+ */
+ 
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type opentype as open {
+id:int32,
+fname:string
+}
+
+create dataset testds(opentype) primary key id; 
+
+insert into dataset testds({'id': 1, 'fname': "name"});
+
+for $x in dataset('testds')
+return {$x.fname: "smith", lowercase("NAME"): "john"}
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/records/open-open-fieldname-conflict_issue173.aql b/asterix-app/src/test/resources/runtimets/queries/records/open-open-fieldname-conflict_issue173.aql
new file mode 100644
index 0000000..225e596
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/records/open-open-fieldname-conflict_issue173.aql
@@ -0,0 +1,21 @@
+/*
+ * Description    : Tests whether a conflict between two open field names is detected
+ * Expected Result: An error reporting that there is a duplicate field name "name"
+ * Author: zheilbron
+ */
+ 
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type opentype as open {
+fname1: string,
+fname2: string
+}
+
+create dataset testds(opentype) primary key fname1; 
+
+insert into dataset testds({'fname1': "name", 'fname2': "name"});
+
+for $x in dataset('testds')
+return {$x.fname1: "john", $x.fname2: "smith"}
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/scan/10.aql b/asterix-app/src/test/resources/runtimets/queries/scan/10.aql
index 2d71d87..b255178 100644
--- a/asterix-app/src/test/resources/runtimets/queries/scan/10.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/scan/10.aql
@@ -12,7 +12,7 @@
 }
 
 create dataset DBLP1(DBLPType) 
-  partitioned by key id;
+  primary key id;
 
 // drop dataset DBLP1;
 load dataset DBLP1
diff --git a/asterix-app/src/test/resources/runtimets/queries/scan/20.aql b/asterix-app/src/test/resources/runtimets/queries/scan/20.aql
index 675f26d..17da0b0 100644
--- a/asterix-app/src/test/resources/runtimets/queries/scan/20.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/scan/20.aql
@@ -13,7 +13,7 @@
 }
 
 create dataset DBLPadm(DBLPType) 
-  partitioned by key id;
+  primary key id;
 
 // drop dataset DBLPadm;
 load dataset DBLPadm 
diff --git a/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_1.aql b/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_1.aql
index ff04a36..86c8430 100644
--- a/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_1.aql
@@ -21,7 +21,7 @@
 }
 
 create dataset DBLPadm(DBLPType) 
-  partitioned by key id;
+  primary key id;
 
 // drop dataset DBLPadm;
 load dataset DBLPadm 
diff --git a/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_2.aql b/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_2.aql
index 297e2f2..7dd319a 100644
--- a/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_2.aql
@@ -21,7 +21,7 @@
 }
 
 create dataset DBLPadm(DBLPType) 
-  partitioned by key id;
+  primary key id;
 
 // drop dataset DBLPadm;
 load dataset DBLPadm 
diff --git a/asterix-app/src/test/resources/runtimets/queries/scan/spatial_types_02.aql b/asterix-app/src/test/resources/runtimets/queries/scan/spatial_types_02.aql
index ea06b96..0997e4f 100644
--- a/asterix-app/src/test/resources/runtimets/queries/scan/spatial_types_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/scan/spatial_types_02.aql
@@ -12,7 +12,7 @@
 }
 
 create dataset Spatial2(SpatialType) 
-	partitioned by key id;
+	primary key id;
 
 load dataset Spatial2 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/scan/temp_types_02.aql b/asterix-app/src/test/resources/runtimets/queries/scan/temp_types_02.aql
index 064385c..a8654bd 100644
--- a/asterix-app/src/test/resources/runtimets/queries/scan/temp_types_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/scan/temp_types_02.aql
@@ -11,7 +11,7 @@
 }
 
 create dataset Temp2(TempType) 
-	partitioned by key id;
+	primary key id;
 
 load dataset Temp2 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/similarity/fuzzyeq-edit-distance.aql b/asterix-app/src/test/resources/runtimets/queries/similarity/fuzzyeq-edit-distance.aql
index d3eed71..3d6dee4 100644
--- a/asterix-app/src/test/resources/runtimets/queries/similarity/fuzzyeq-edit-distance.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/similarity/fuzzyeq-edit-distance.aql
@@ -13,7 +13,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/similarity/fuzzyeq-similarity-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/similarity/fuzzyeq-similarity-jaccard.aql
index 0c11edb..1bef743 100644
--- a/asterix-app/src/test/resources/runtimets/queries/similarity/fuzzyeq-similarity-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/similarity/fuzzyeq-similarity-jaccard.aql
@@ -13,7 +13,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-check_query.aql b/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-check_query.aql
index 01bea0b..836fdb6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-check_query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-check_query.aql
@@ -13,7 +13,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-sorted-check_query.aql b/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-sorted-check_query.aql
index aa5b067..ba3b85e 100644
--- a/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-sorted-check_query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-sorted-check_query.aql
@@ -13,7 +13,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-sorted_query.aql b/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-sorted_query.aql
index a2373af..25cb071 100644
--- a/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-sorted_query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-sorted_query.aql
@@ -13,7 +13,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard_query.aql b/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard_query.aql
index 05f3a61..20afc67 100644
--- a/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard_query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard_query.aql
@@ -13,7 +13,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset DBLP(DBLPType) 
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset DBLP 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/spatial/cell-aggregation-with-filtering.aql b/asterix-app/src/test/resources/runtimets/queries/spatial/cell-aggregation-with-filtering.aql
index 0face3b..81b8bd2 100644
--- a/asterix-app/src/test/resources/runtimets/queries/spatial/cell-aggregation-with-filtering.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/spatial/cell-aggregation-with-filtering.aql
@@ -13,7 +13,7 @@
 create nodegroup group1 if not exists on nc1, nc2;
 
 create dataset TwitterData(Tweet)
-  partitioned by key id on group1;
+  primary key id on group1;
 
 
 load dataset TwitterData 
diff --git a/asterix-app/src/test/resources/runtimets/queries/spatial/create-rtree-index.aql b/asterix-app/src/test/resources/runtimets/queries/spatial/create-rtree-index.aql
index e755aa9..3fe1355 100644
--- a/asterix-app/src/test/resources/runtimets/queries/spatial/create-rtree-index.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/spatial/create-rtree-index.aql
@@ -17,7 +17,7 @@
   circle: circle
 }
 
-create dataset MyData(SpatialType) partitioned by key id;
+create dataset MyData(SpatialType) primary key id;
 create index rtree_index1 on MyData(point) type rtree;
 create index rtree_index2 on MyData(line1) type rtree;
 create index rtree_index3 on MyData(poly1) type rtree;
diff --git a/asterix-app/src/test/resources/runtimets/queries/string/cpttostr01.aql b/asterix-app/src/test/resources/runtimets/queries/string/cpttostr01.aql
index a66b00f..7516bea 100644
--- a/asterix-app/src/test/resources/runtimets/queries/string/cpttostr01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/string/cpttostr01.aql
@@ -16,7 +16,7 @@
 cpt:[int32]
 }
 
-create dataset testds(TestType) partitioned by key id;
+create dataset testds(TestType) primary key id;
 
 // insert codepoint data into internal dataset testds here into the cpt attribute
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/string/endwith03.aql b/asterix-app/src/test/resources/runtimets/queries/string/endwith03.aql
index 832efbc..257d992 100644
--- a/asterix-app/src/test/resources/runtimets/queries/string/endwith03.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/string/endwith03.aql
@@ -15,7 +15,7 @@
 name:string
 }
 
-create dataset testds(TestType) partitioned by key name;
+create dataset testds(TestType) primary key name;
 
 insert into dataset testds({"name":"Jim Jones"});
 insert into dataset testds({"name":"Ravi Kumar"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/string/matches05.aql b/asterix-app/src/test/resources/runtimets/queries/string/matches05.aql
index 2f7b83e..4d9e5b2 100644
--- a/asterix-app/src/test/resources/runtimets/queries/string/matches05.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/string/matches05.aql
@@ -16,7 +16,7 @@
 id:int32
 }
 
-create dataset testds1(TestType1) partitioned by key id;
+create dataset testds1(TestType1) primary key id;
 
 insert into dataset testds1({"fname":"Test","lname":"Test","id":123});
 insert into dataset testds1({"fname":"Testa","lname":"Test","id":124});
diff --git a/asterix-app/src/test/resources/runtimets/queries/string/startwith03.aql b/asterix-app/src/test/resources/runtimets/queries/string/startwith03.aql
index 8aed603..baa7d0b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/string/startwith03.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/string/startwith03.aql
@@ -15,7 +15,7 @@
 name:string
 }
 
-create dataset testds(TestType) partitioned by key name;
+create dataset testds(TestType) primary key name;
 
 insert into dataset testds({"name":"John Smith"});
 insert into dataset testds({"name":"John Doe"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/string/strconcat01.aql b/asterix-app/src/test/resources/runtimets/queries/string/strconcat01.aql
index 0b3941d..5266fa1 100644
--- a/asterix-app/src/test/resources/runtimets/queries/string/strconcat01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/string/strconcat01.aql
@@ -17,7 +17,7 @@
 lname:string
 }
 
-create dataset testds(TestType) partitioned by key id;
+create dataset testds(TestType) primary key id;
 
 // insert string data into internal dataset testds into the name attribute
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/string/strlen03.aql b/asterix-app/src/test/resources/runtimets/queries/string/strlen03.aql
index bba2a7f..ddff7ed 100644
--- a/asterix-app/src/test/resources/runtimets/queries/string/strlen03.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/string/strlen03.aql
@@ -15,7 +15,7 @@
 name:string
 }
 
-create dataset testds(TestType) partitioned by key name;
+create dataset testds(TestType) primary key name;
 
 insert into dataset testds({"name":"Maradona"});
 insert into dataset testds({"name":"Pele"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/string/substr05.aql b/asterix-app/src/test/resources/runtimets/queries/string/substr05.aql
index fbdcba4..1a33fdf 100644
--- a/asterix-app/src/test/resources/runtimets/queries/string/substr05.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/string/substr05.aql
@@ -15,7 +15,7 @@
 name : string
 }
 
-create dataset testdst(TestType) partitioned by key name;
+create dataset testdst(TestType) primary key name;
 
 insert into dataset testdst({"name":"UC Berkeley"});
 insert into dataset testdst({"name":"UC Irvine"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/string/substr06.aql b/asterix-app/src/test/resources/runtimets/queries/string/substr06.aql
index 82d21c2..bfeff2d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/string/substr06.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/string/substr06.aql
@@ -14,7 +14,7 @@
 name : string
 }
 
-create dataset testdst(TestType) partitioned by key name;
+create dataset testdst(TestType) primary key name;
 
 insert into dataset testdst({"name":"UC Berkeley"});
 insert into dataset testdst({"name":"UC Irvine"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/string/toLowerCase03.aql b/asterix-app/src/test/resources/runtimets/queries/string/toLowerCase03.aql
index 411dacf..301af52 100644
--- a/asterix-app/src/test/resources/runtimets/queries/string/toLowerCase03.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/string/toLowerCase03.aql
@@ -17,7 +17,7 @@
 name:string
 }
 
-create dataset testds(TestType) partitioned by key name;
+create dataset testds(TestType) primary key name;
 
 insert into dataset testds({"name":"Maradona"});
 insert into dataset testds({"name":"Pele"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp/accessors.aql b/asterix-app/src/test/resources/runtimets/queries/temp/accessors.aql
new file mode 100644
index 0000000..9cc9f8d
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temp/accessors.aql
@@ -0,0 +1,22 @@
+/*
+ * Description      :   Check temporal accessors for different types
+ * Expected Result  :   Success
+ * Date             :   31st Aug, 2012
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+write output to nc1:"rttest/temp_accessors.adm";
+
+let $c1 := date("2010-10-30")
+let $c2 := datetime("1987-11-19T23:49:23.938")
+let $c3 := date("-1987-11-19")
+let $c4 := date("09280329")
+let $c5 := datetime("19371229T20030628")
+let $c6 := time("12:23:34.930+07:00")
+let $c7 := string("-0003-01-09T23:12:12.39-07:00")
+let $c8 := duration("P3Y73M632DT49H743M3948.94S")
+
+return {"year1": year($c1), "year2": year($c2), "year3": year($c3), "year4": year($c4), "year5": year($c5), "year6": year($c7), "year7": year($c8), "month1": month($c1), "month2": month($c2), "month3": month($c3), "month4": month($c4), "month5": month($c5), "month6": month($c8), "day1": day($c1), "day2": day($c2), "day3": day($c3), "day4": day($c4), "day5": day($c5), "day6": day($c8), "hour1": hour($c2), "hour2": hour($c5), "hour3": hour($c6), "hour4": hour($c8), "min1": minute($c2), "min2": minute($c5), "min3": minute($c6), "min4": minute($c8), "second1": second($c2), "second2": second($c5), "second3": second($c6), "second4": second($c8), "ms1": millisecond($c2), "ms2": millisecond($c5), "ms3": millisecond($c6), "ms4": millisecond($c8)}
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp/adjust_timezone.aql b/asterix-app/src/test/resources/runtimets/queries/temp/adjust_timezone.aql
new file mode 100644
index 0000000..9f6c60e
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temp/adjust_timezone.aql
@@ -0,0 +1,17 @@
+/*
+ * Description      :   Check the adjust-timezone functions
+ * Expected Result  :   Success
+ * Date             :   15th Oct, 2012
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+write output to nc1:"rttest/temp_adjust_timezone.adm";
+
+let $t1 := time("20:15:10.327")
+let $dt1 := datetime("2010-10-23T01:12:13.329Z")
+let $s1 := adjust-time-for-timezone($t1, "+0800")
+let $s2 := adjust-datetime-for-timezone($dt1, "-0615")
+return { "string1" : $s1, "string2" : $s2 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp/calendar_duration.aql b/asterix-app/src/test/resources/runtimets/queries/temp/calendar_duration.aql
new file mode 100644
index 0000000..67360ce
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temp/calendar_duration.aql
@@ -0,0 +1,28 @@
+/*
+ * Description      :   Check the calendar-duration functions
+ * Expected Result  :   Success
+ * Date             :   15th Oct, 2012
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+write output to nc1:"rttest/temp_calendar_duration.adm";
+
+let $t1 := datetime("1987-11-19T23:49:23.938")
+let $t2 := date("-1328-10-23")
+let $dr1 := duration("P7382DT39283M3921.329S")
+let $dr2 := duration("-PT63H398212M3219.328S")
+let $dr3 := duration("P1Y90M")
+let $dr4 := duration("-P3Y89M4089DT47382.983S")
+let $cdr1 := calendar-duration-from-datetime($t1, $dr1)
+let $cdr2 := calendar-duration-from-datetime($t1, $dr2)
+let $cdr3 := calendar-duration-from-datetime($t1, $dr3)
+let $cdr4 := calendar-duration-from-datetime($t1, $dr4)
+let $cdr5 := calendar-duration-from-date($t2, $dr1)
+let $cdr6 := calendar-duration-from-date($t2, $dr2)
+let $cdr7 := calendar-duration-from-date($t2, $dr3)
+let $cdr8 := calendar-duration-from-date($t2, $dr4)
+
+return { "cduration1":$cdr1, "cduration2":$cdr2, "cduration3":$cdr3, "cduration4":$cdr4, "cduration5":$cdr5, "cduration6":$cdr6, "cduration7":$cdr7, "cduration8":$cdr8 }
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp/date_functions.aql b/asterix-app/src/test/resources/runtimets/queries/temp/date_functions.aql
new file mode 100644
index 0000000..310fa43
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temp/date_functions.aql
@@ -0,0 +1,24 @@
+/*
+ * Description      :   Check temporal functions for date type
+ * Expected Result  :   Success
+ * Date             :   24th Sep, 2012
+ */
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+write output to nc1:"rttest/temp_date_functions.adm";
+
+let $d1 := date-from-unix-time-in-days(15600)
+let $dt1 := datetime("1327-12-02T23:35:49.938Z")
+let $d2 := date-from-datetime($dt1)
+let $dt2 := datetime("2012-10-11T02:30:23+03:00")
+let $d3 := date-from-datetime($dt2)
+let $dr1 := duration("-P2Y1M90DT30H")
+let $d4 := add-date-duration($d1, $dr1)
+let $dr2 := duration("P300Y900MT360000M")
+let $d5 := add-date-duration($d2, $dr2)
+let $dr3 := subtract-date($d5, $d2)
+let $dr4 := subtract-date($d4, $d1)
+
+return { "date1" : $d1, "date2" : $d2, "date3" : $d3, "date4" : $d4, "date5" : $d5, "duration1" : $dr3, "duration2" : $dr4  }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp/datetime_functions.aql b/asterix-app/src/test/resources/runtimets/queries/temp/datetime_functions.aql
new file mode 100644
index 0000000..a70c9f2
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temp/datetime_functions.aql
@@ -0,0 +1,20 @@
+/*
+ * Description      :   Check temporal functions for datetime
+ * Expected Result  :   Success
+ * Date             :   24th Sep, 2012
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+write output to nc1:"rttest/temp_datetime_functions.adm";
+
+let $dt1 := datetime-from-unix-time-in-ms(956007429)
+let $d1 := date("1327-12-02")
+let $t1 := time("23:35:49.938Z")
+let $dt2 := datetime-from-date-time($d1, $t1)
+let $dr1 := subtract-datetime($dt2, $dt1)
+let $dt3 := add-datetime-duration($dt1, $dr1)
+
+return { "datetime1" : $dt1, "datetime2" : $dt2, "datetime3" : $dt3, "duration1" : $dr1 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp/insert_from_delimited_ds.aql b/asterix-app/src/test/resources/runtimets/queries/temp/insert_from_delimited_ds.aql
new file mode 100644
index 0000000..4520f46
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temp/insert_from_delimited_ds.aql
@@ -0,0 +1,26 @@
+/*
+ * Test case name: insert_from_delimited_ds.aql
+ * Description: verify that temporal types (date, time, datetime, duration) are read correctly from a delimited-text external dataset
+ * Expected result: success
+ */
+ 
+drop dataverse testdvt if exists;
+create dataverse testdvt;
+use dataverse testdvt;
+
+create type testtype as closed {
+  id: string,
+  dateField: date,
+  timeField: time,
+  datetimeField: datetime,
+  durationField: duration
+}
+
+write output to nc1:"rttest/temp_insert_from_delimited_ds.adm";
+
+create external dataset testds(testtype)
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/temporal/temporalData.txt"),("format"="delimited-text"),("delimiter"="|"));
+
+for $r in dataset("testds") 
+return {"date": $r.dateField, "time": $r.timeField, "datetime": $r.datetimeField, "duration": $r.durationField }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp/insert_from_ext_ds.aql b/asterix-app/src/test/resources/runtimets/queries/temp/insert_from_ext_ds.aql
new file mode 100644
index 0000000..5813696
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temp/insert_from_ext_ds.aql
@@ -0,0 +1,27 @@
+/*
+ * Test case name: insert_from_ext_ds.aql
+ * Description: verify that temporal types (date, time, datetime, duration, interval) are read correctly from an ADM external dataset
+ * Expected result: success
+ */
+ 
+drop dataverse testdvt if exists;
+create dataverse testdvt;
+use dataverse testdvt;
+
+create type testtype as open {
+  id: string,
+  dateField: date?,
+  timeField: time?,
+  datetimeField: datetime?,
+  durationField: duration?,
+  intervalField: interval?
+}
+
+write output to nc1:"rttest/temp_insert_from_ext_ds.adm";
+
+create external dataset testds(testtype)
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/temporal/temporalData.json"),("format"="adm"));
+
+for $r in dataset("testds") 
+return {"date": $r.dateField, "time": $r.timeField, "datetime": $r.datetimeField, "duration": $r.durationField, "interval": $r.intervalField }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp/interval_functions.aql b/asterix-app/src/test/resources/runtimets/queries/temp/interval_functions.aql
new file mode 100644
index 0000000..a29f7f5
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temp/interval_functions.aql
@@ -0,0 +1,56 @@
+/*
+ * Description      :   Check temporal functions for interval
+ * Expected Result  :   Success
+ * Date             :   2nd Nov, 2012
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+write output to nc1:"rttest/temp_interval_functions.adm";
+
+let $itv1 := interval-from-date("2010-10-30", "2010-12-21")
+let $itv2 := interval-from-date("2011-10-30", "2012-10-21")
+let $itv3 := interval-from-date("2010-06-30", "2013-01-01")
+let $blnBefore1 := interval-before($itv1, $itv2)
+let $blnAfter1 := interval-after($itv2, $itv1)
+let $blnBefore2 := interval-before($itv1, $itv3)
+let $blnAfter2 := interval-after($itv3, $itv1)
+
+let $itv4 := interval-from-datetime("2012-06-26T01:01:01.111", "2012-07-27T02:02:02.222")
+let $itv5 := interval-from-datetime("20120727T020202222", "2013-08-08T03:03:03.333")
+let $itv6 := interval-from-datetime("19000707T020202222", "2013-08-07T03:03:03.333")
+let $blnMeet1 := interval-meets($itv4, $itv5)
+let $blnMetBy1 := interval-met-by($itv5, $itv4)
+let $blnMeet2 := interval-meets($itv6, $itv4)
+let $blnMetBy2 := interval-met-by($itv6, $itv4)
+
+let $itv7 := interval-from-time("12:32:38", "20:29:20")
+let $itv8 := interval-from-time("17:48:19", "22:19:49")
+let $itv9 := interval-from-time("01:32:49", "12:33:00")
+let $blnOverlaps1 := interval-overlaps($itv7, $itv8)
+let $blnOverlapped1 := interval-overlapped-by($itv8, $itv7)
+let $blnOverlaps2 := interval-overlaps($itv8, $itv7)
+let $blnOverlapped2 := interval-overlapped-by($itv7, $itv8)
+let $blnOverlap1 := overlap($itv9, $itv7)
+let $blnOverlap2 := overlap($itv9, $itv8)
+
+let $itv10 := interval-from-date("2010-10-30", "2010-11-30")
+let $blnStarts1 := interval-starts($itv10, $itv1)
+let $blnStarts2 := interval-starts($itv10, $itv2)
+let $blnStartedBy1 := interval-started-by($itv1, $itv10)
+let $blnStartedBy2 := interval-started-by($itv10, $itv2)
+
+let $blnCovers1 := interval-covers($itv6, $itv4)
+let $blnCovers2 := interval-covers($itv6, $itv5)
+let $blnCoveredBy1 := interval-covered-by($itv4, $itv6)
+let $blnCoveredBy2 := interval-covered-by($itv5, $itv6)
+
+let $itv11 := interval-from-time("19:00:00.009", "20:29:20.000")
+let $blnEnds1 := interval-ends($itv11, $itv7)
+let $blnEnds2 := interval-ends($itv11, $itv8)
+let $blnEndedBy1 := interval-ended-by($itv7, $itv11)
+let $blnEndedBy2 := interval-ended-by($itv8, $itv11)
+
+return { "before1" : $blnBefore1, "before2" : $blnBefore2, "after1" : $blnAfter1, "after2" : $blnAfter2, "meet1" : $blnMeet1, "meet2" : $blnMeet2, "metby1" : $blnMetBy1, "metby2" : $blnMetBy2, "overlaps1" : $blnOverlaps1, "overlaps2" : $blnOverlaps2, "overlapped1" : $blnOverlapped1, "overlapped2" : $blnOverlapped2, "overlap1" : $blnOverlap1, "overlap2" : $blnOverlap2, "starts1" : $blnStarts1, "starts2" : $blnStarts2, "startedby1" : $blnStartedBy1, "startedby2" : $blnStartedBy2, "covers1" : $blnCovers1, "covers2" : $blnCovers2, "coveredby1" : $blnCoveredBy1, "coveredby2" : $blnCoveredBy2, "ends1" : $blnEnds1, "ends2" : $blnEnds2, "endedby1" : $blnEndedBy1, "endedby2" : $blnEndedBy2 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp/time_functions.aql b/asterix-app/src/test/resources/runtimets/queries/temp/time_functions.aql
new file mode 100644
index 0000000..29adcc0
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temp/time_functions.aql
@@ -0,0 +1,28 @@
+/*
+ * Description      :   Check temporal functions for time
+ * Expected Result  :   Success
+ * Date             :   24th Sep, 2012
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+write output to nc1:"rttest/temp_time_functions.adm";
+
+let $t1 := time-from-unix-time-in-ms(1560074)
+let $dt1 := datetime("1327-12-02T23:35:49.938Z")
+let $t2 := time-from-datetime($dt1)
+let $dt2 := datetime("2012-10-11T02:30:23+03:00")
+let $t3 := time-from-datetime($dt2)
+let $dr1 := duration("-PT30H")
+let $t4 := add-time-duration($t1, $dr1)
+let $dr2 := duration("PT36M")
+let $t5 := add-time-duration($t2, $dr2)
+let $dr3 := subtract-time($t5, $t2)
+let $dr4 := subtract-time($t4, $t1)
+let $ct := current-time()
+let $cd := current-date()
+let $cdt := current-datetime()
+
+return { "time1" : $t1, "time2" : $t2, "time3" : $t3, "time4" : $t4, "time5" : $t5, "duration1" : $dr3, "duration2" : $dr4  }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/distinct_by.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/distinct_by.aql
index 95d715e..3e41042 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/distinct_by.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/distinct_by.aql
@@ -24,7 +24,7 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/group_no_agg.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/group_no_agg.aql
index dedaa7f..8b941e4 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/group_no_agg.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/group_no_agg.aql
@@ -10,7 +10,7 @@
 }
 
 create dataset Regions_group_no_agg(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 
 write output to nc1:"rttest/tpch_group_no_agg.adm";
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q10_returned_item.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q10_returned_item.aql
index 3b641a4..b775f09 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q10_returned_item.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q10_returned_item.aql
@@ -89,21 +89,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q10_returned_item_int64.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q10_returned_item_int64.aql
index cfac014..27fa512 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q10_returned_item_int64.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q10_returned_item_int64.aql
@@ -89,21 +89,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q11_important_stock.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q11_important_stock.aql
index 245c7f5..9e4b165 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q11_important_stock.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q11_important_stock.aql
@@ -90,21 +90,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q12_shipping.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q12_shipping.aql
index 9dcade8..b541615 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q12_shipping.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q12_shipping.aql
@@ -90,21 +90,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q13_customer_distribution.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q13_customer_distribution.aql
index 9a91177..c4a987a 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q13_customer_distribution.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q13_customer_distribution.aql
@@ -90,21 +90,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q14_promotion_effect.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q14_promotion_effect.aql
index a38b1d6..4ea7988 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q14_promotion_effect.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q14_promotion_effect.aql
@@ -90,21 +90,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q15_top_supplier.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q15_top_supplier.aql
index 49b0c03..29830a9 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q15_top_supplier.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q15_top_supplier.aql
@@ -90,21 +90,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q16_parts_supplier_relationship.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q16_parts_supplier_relationship.aql
index ad16d3c..978fb0f 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q16_parts_supplier_relationship.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q16_parts_supplier_relationship.aql
@@ -90,21 +90,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q17_small_quantity_order_revenue.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q17_small_quantity_order_revenue.aql
index 05cf693..92e4083 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q17_small_quantity_order_revenue.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q17_small_quantity_order_revenue.aql
@@ -90,21 +90,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q18_large_volume_customer.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q18_large_volume_customer.aql
index 1347ff5..9ab4f83 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q18_large_volume_customer.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q18_large_volume_customer.aql
@@ -90,21 +90,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q19_discounted_revenue.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q19_discounted_revenue.aql
index b856eed..c83f663 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q19_discounted_revenue.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q19_discounted_revenue.aql
@@ -90,21 +90,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q1_pricing_summary_report_nt.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q1_pricing_summary_report_nt.aql
index af39b3f..b6de1fa 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q1_pricing_summary_report_nt.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q1_pricing_summary_report_nt.aql
@@ -22,7 +22,7 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q20_potential_part_promotion.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q20_potential_part_promotion.aql
index bdb591f..2143cfa 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q20_potential_part_promotion.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q20_potential_part_promotion.aql
@@ -90,21 +90,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q21_suppliers_who_kept_orders_waiting.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q21_suppliers_who_kept_orders_waiting.aql
index f544e09..32cf149 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q21_suppliers_who_kept_orders_waiting.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q21_suppliers_who_kept_orders_waiting.aql
@@ -90,21 +90,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q22_global_sales_opportunity.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q22_global_sales_opportunity.aql
index 29131c8..b2ac2d0 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q22_global_sales_opportunity.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q22_global_sales_opportunity.aql
@@ -90,21 +90,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q2_minimum_cost_supplier.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q2_minimum_cost_supplier.aql
index 5490213..c5e17d6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q2_minimum_cost_supplier.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q2_minimum_cost_supplier.aql
@@ -90,21 +90,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q3_shipping_priority_nt.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q3_shipping_priority_nt.aql
index 68fb864..c3fcc25 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q3_shipping_priority_nt.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q3_shipping_priority_nt.aql
@@ -90,21 +90,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q4_order_priority.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q4_order_priority.aql
index 3f1c7ec..6d755d2 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q4_order_priority.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q4_order_priority.aql
@@ -90,21 +90,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q5_local_supplier_volume.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q5_local_supplier_volume.aql
index d345b3b..e64d98a 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q5_local_supplier_volume.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q5_local_supplier_volume.aql
@@ -90,21 +90,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q6_forecast_revenue_change.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q6_forecast_revenue_change.aql
index 3ca0424..dee85bb 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q6_forecast_revenue_change.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q6_forecast_revenue_change.aql
@@ -90,21 +90,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q7_volume_shipping.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q7_volume_shipping.aql
index c91eca5..6432361 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q7_volume_shipping.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q7_volume_shipping.aql
@@ -90,21 +90,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q8_national_market_share.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q8_national_market_share.aql
index 420a179..2e0eefb 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q8_national_market_share.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q8_national_market_share.aql
@@ -90,21 +90,21 @@
 }
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset Partsupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q9_product_type_profit_nt.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q9_product_type_profit_nt.aql
index 33c2aad..21cdcdd 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q9_product_type_profit_nt.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q9_product_type_profit_nt.aql
@@ -91,21 +91,21 @@
 
 
 create dataset LineItem(LineItemType)
-  partitioned by key l_orderkey, l_linenumber;
+  primary key l_orderkey, l_linenumber;
 create dataset Orders(OrderType)
-  partitioned by key o_orderkey;
+  primary key o_orderkey;
 create dataset Supplier(SupplierType)
-  partitioned by key s_suppkey;
+  primary key s_suppkey;
 create dataset Region(RegionType) 
-  partitioned by key r_regionkey;
+  primary key r_regionkey;
 create dataset Nation(NationType) 
-  partitioned by key n_nationkey;
+  primary key n_nationkey;
 create dataset Part(PartType)
-  partitioned by key p_partkey;
+  primary key p_partkey;
 create dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey;  
+  primary key ps_partkey, ps_suppkey;  
 create dataset Customer(CustomerType) 
-  partitioned by key c_custkey;
+  primary key c_custkey;
 
 load dataset LineItem 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf09.aql b/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf09.aql
index b16e2dd..8a09ea2 100644
--- a/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf09.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf09.aql
@@ -13,7 +13,7 @@
 id : int32
 }
 
-create dataset test.t1(TestType) partitioned by key id;
+create dataset test.t1(TestType) primary key id;
 
 insert into dataset test.t1({"id":345});
 insert into dataset test.t1({"id":315});
diff --git a/asterix-app/src/test/resources/runtimets/results/comparison/date_order.adm b/asterix-app/src/test/resources/runtimets/results/comparison/date_order.adm
new file mode 100644
index 0000000..3fd4c19
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/comparison/date_order.adm
@@ -0,0 +1,6 @@
+date("-0500-03-21")
+date("1362-02-28")
+date("1600-02-29")
+date("2012-02-29")
+date("2021-03-01")
+date("2049-04-23")
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/comparison/time_order.adm b/asterix-app/src/test/resources/runtimets/results/comparison/time_order.adm
new file mode 100644
index 0000000..c937bdd
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/comparison/time_order.adm
@@ -0,0 +1,6 @@
+time("00:00:00.000Z")
+time("02:00:00.000Z")
+time("19:00:00.000Z")
+time("20:00:00.470Z")
+time("23:00:00.382Z")
+time("23:59:59.999Z")
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/constructor/int_01.adm b/asterix-app/src/test/resources/runtimets/results/constructor/int_01.adm
index f9a1203..470d1f8 100644
--- a/asterix-app/src/test/resources/runtimets/results/constructor/int_01.adm
+++ b/asterix-app/src/test/resources/runtimets/results/constructor/int_01.adm
@@ -1 +1 @@
-{ "int8": 80i8, "int16": 160i16, "int32": 320, "int64": 640i64, "int8": -80i8, "int16": -160i16, "int32": -320, "int64": -640i64 }
\ No newline at end of file
+{ "int8": 80i8, "int16": 160i16, "int32": 320, "int64": 640i64, "int8_2": -80i8, "int16_2": -160i16, "int32_2": -320, "int64_2": -640i64, "int64_min": -9223372036854775808i64 }
diff --git a/asterix-app/src/test/resources/runtimets/results/constructor/interval.adm b/asterix-app/src/test/resources/runtimets/results/constructor/interval.adm
new file mode 100644
index 0000000..8fb7e25
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/constructor/interval.adm
@@ -0,0 +1 @@
+{ "interval1": interval("date("2010-10-30"), date("2012-10-21")"), "interval2": interval("time("03:04:05.678Z"), time("23:24:25.267Z")"), "interval3": interval("datetime("-1987-11-19T02:43:57.938Z"), datetime("1999-11-12T12:49:35.948Z")"), "interval4": interval("date("0001-12-27"), date("0006-01-27")"), "interval5": interval("time("20:03:20.948Z"), time("20:57:50.886Z")"), "interval6": interval("datetime("-2043-11-19T15:32:39.293Z"), datetime("-1603-03-12T12:12:38.242Z")") }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv02.adm b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv02.adm
index f8d0c9d..8bd73db 100644
--- a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv02.adm
+++ b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv02.adm
@@ -1,4 +1,4 @@
-{ "DataverseName": "student", "DatasetName": "gdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Thu Nov 08 13:22:47 PST 2012" }
-{ "DataverseName": "student", "DatasetName": "ugdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Thu Nov 08 13:22:47 PST 2012" }
-{ "DataverseName": "teacher", "DatasetName": "prof", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Thu Nov 08 13:22:47 PST 2012" }
-{ "DataverseName": "teacher", "DatasetName": "pstdoc", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Thu Nov 08 13:22:47 PST 2012" }
+{ "DataverseName": "student", "DatasetName": "gdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 19:01:46 PST 2013" }
+{ "DataverseName": "student", "DatasetName": "ugdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 19:01:46 PST 2013" }
+{ "DataverseName": "teacher", "DatasetName": "prof", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 19:01:46 PST 2013" }
+{ "DataverseName": "teacher", "DatasetName": "pstdoc", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 19:01:46 PST 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv04.adm b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv04.adm
index bd3c0af..ff1f9df 100644
--- a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv04.adm
+++ b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv04.adm
@@ -1,4 +1,4 @@
-{ "DataverseName": "student", "DatasetName": "gdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Thu Nov 08 13:25:37 PST 2012" }
-{ "DataverseName": "student", "DatasetName": "ugdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Thu Nov 08 13:25:37 PST 2012" }
-{ "DataverseName": "teacher", "DatasetName": "prof", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Thu Nov 08 13:25:37 PST 2012" }
-{ "DataverseName": "teacher", "DatasetName": "pstdoc", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Thu Nov 08 13:25:37 PST 2012" }
+{ "DataverseName": "student", "DatasetName": "gdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 19:03:50 PST 2013" }
+{ "DataverseName": "student", "DatasetName": "ugdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 19:03:50 PST 2013" }
+{ "DataverseName": "teacher", "DatasetName": "prof", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 19:03:50 PST 2013" }
+{ "DataverseName": "teacher", "DatasetName": "pstdoc", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 19:03:50 PST 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv19.adm b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv19.adm
index be94bc8..7be9c57 100644
--- a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv19.adm
+++ b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv19.adm
@@ -1,7 +1,7 @@
-{ "DataverseName": "test1", "DatasetName": "TwitterData", "DataTypeName": "Tweet", "DatasetType": "EXTERNAL", "InternalDetails": null, "ExternalDetails": { "DatasourceAdapter": "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter", "Properties": [ { "Name": "path", "Value": "nc1://data/twitter/extrasmalltweets.txt" }, { "Name": "format", "Value": "adm" } ] }, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:41:21 PST 2012" }
-{ "DataverseName": "test1", "DatasetName": "t1", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:41:21 PST 2012" }
-{ "DataverseName": "test1", "DatasetName": "t2", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:41:21 PST 2012" }
-{ "DataverseName": "test1", "DatasetName": "t3", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:41:21 PST 2012" }
-{ "DataverseName": "test2", "DatasetName": "t2", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:41:21 PST 2012" }
-{ "DataverseName": "test2", "DatasetName": "t3", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:41:21 PST 2012" }
-{ "DataverseName": "test2", "DatasetName": "t4", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:41:21 PST 2012" }
+{ "DataverseName": "test1", "DatasetName": "TwitterData", "DataTypeName": "Tweet", "DatasetType": "EXTERNAL", "InternalDetails": null, "ExternalDetails": { "DatasourceAdapter": "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter", "Properties": [ { "Name": "path", "Value": "nc1://data/twitter/extrasmalltweets.txt" }, { "Name": "format", "Value": "adm" } ] }, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 19:04:36 PST 2013" }
+{ "DataverseName": "test1", "DatasetName": "t1", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 19:04:36 PST 2013" }
+{ "DataverseName": "test1", "DatasetName": "t2", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 19:04:36 PST 2013" }
+{ "DataverseName": "test1", "DatasetName": "t3", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 19:04:36 PST 2013" }
+{ "DataverseName": "test2", "DatasetName": "t2", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 19:04:36 PST 2013" }
+{ "DataverseName": "test2", "DatasetName": "t3", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 19:04:36 PST 2013" }
+{ "DataverseName": "test2", "DatasetName": "t4", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 19:04:36 PST 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/join_across_dataverses.adm b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/join_across_dataverses.adm
index 87619a8..e78ad8f 100644
--- a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/join_across_dataverses.adm
+++ b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/join_across_dataverses.adm
@@ -1,3 +1,3 @@
-{ "cust_name": "Jodi Alex", "cust_age": 19, "order_total": 7.206f, "orderList": [ 10, 5 ], "orderList": {{ 10, 5 }} }
-{ "cust_name": "Jodi Rotruck", "cust_age": null, "order_total": 14.2326f, "orderList": [ 10, 775 ], "orderList": {{ 10, 775 }} }
-{ "cust_name": "Jodi Rotruck", "cust_age": null, "order_total": 97.20656f, "orderList": [ 1000, 775 ], "orderList": {{ 1000, 775 }} }
+{ "cust_name": "Jodi Alex", "cust_age": 19, "order_total": 7.206f, "orderList": [ 10, 5 ] }
+{ "cust_name": "Jodi Rotruck", "cust_age": null, "order_total": 14.2326f, "orderList": [ 10, 775 ] }
+{ "cust_name": "Jodi Rotruck", "cust_age": null, "order_total": 97.20656f, "orderList": [ 1000, 775 ] }
diff --git a/asterix-app/src/test/resources/runtimets/results/custord/join_q_01.adm b/asterix-app/src/test/resources/runtimets/results/custord/join_q_01.adm
index 7648e4a..e78ad8f 100644
--- a/asterix-app/src/test/resources/runtimets/results/custord/join_q_01.adm
+++ b/asterix-app/src/test/resources/runtimets/results/custord/join_q_01.adm
@@ -1,3 +1,3 @@
-{ "cust_name": "Jodi Alex", "cust_age": 19, "order_total": 7.206f, "orderList": [ 10, 5 ], "orderList": {{ 10, 5 }} }
-{ "cust_name": "Jodi Rotruck", "cust_age": null, "order_total": 14.2326f, "orderList": [ 10, 775 ], "orderList": {{ 10, 775 }} }
-{ "cust_name": "Jodi Rotruck", "cust_age": null, "order_total": 97.20656f, "orderList": [ 1000, 775 ], "orderList": {{ 1000, 775 }} }
\ No newline at end of file
+{ "cust_name": "Jodi Alex", "cust_age": 19, "order_total": 7.206f, "orderList": [ 10, 5 ] }
+{ "cust_name": "Jodi Rotruck", "cust_age": null, "order_total": 14.2326f, "orderList": [ 10, 775 ] }
+{ "cust_name": "Jodi Rotruck", "cust_age": null, "order_total": 97.20656f, "orderList": [ 1000, 775 ] }
diff --git a/asterix-app/src/test/resources/runtimets/results/custord/order_q_03.adm b/asterix-app/src/test/resources/runtimets/results/custord/order_q_03.adm
index 903c00d..4e02c4b 100644
--- a/asterix-app/src/test/resources/runtimets/results/custord/order_q_03.adm
+++ b/asterix-app/src/test/resources/runtimets/results/custord/order_q_03.adm
@@ -1,4 +1,4 @@
-{ "orderid": 1000, "ordertot": 97.20656f, "list": [ "ORDER_DELIVERED", "Kathryne" ], "item1": "ORDER_DELIVERED", "item1": "ORDER_DELIVERED", "item2": "Kathryne", "item3": null }
-{ "orderid": 10, "ordertot": 7.206f, "list": [ "ORDER_DELIVERED", "ALEX" ], "item1": "ORDER_DELIVERED", "item1": "ORDER_DELIVERED", "item2": "ALEX", "item3": null }
-{ "orderid": 100, "ordertot": 124.26f, "list": [ "ORDER_DELIVERED", "YASSER" ], "item1": "ORDER_DELIVERED", "item1": "ORDER_DELIVERED", "item2": "YASSER", "item3": null }
-{ "orderid": 10, "ordertot": 14.2326f, "list": [ "ORDER_DELIVERED", "MIKE" ], "item1": "ORDER_DELIVERED", "item1": "ORDER_DELIVERED", "item2": "MIKE", "item3": null }
+{ "orderid": 1000, "ordertot": 97.20656f, "list": [ "ORDER_DELIVERED", "Kathryne" ], "item1": "ORDER_DELIVERED", "item2": "Kathryne", "item3": null }
+{ "orderid": 10, "ordertot": 7.206f, "list": [ "ORDER_DELIVERED", "ALEX" ], "item1": "ORDER_DELIVERED", "item2": "ALEX", "item3": null }
+{ "orderid": 100, "ordertot": 124.26f, "list": [ "ORDER_DELIVERED", "YASSER" ], "item1": "ORDER_DELIVERED", "item2": "YASSER", "item3": null }
+{ "orderid": 10, "ordertot": 14.2326f, "list": [ "ORDER_DELIVERED", "MIKE" ], "item1": "ORDER_DELIVERED", "item2": "MIKE", "item3": null }
diff --git a/asterix-app/src/test/resources/runtimets/results/custord/order_q_04.adm b/asterix-app/src/test/resources/runtimets/results/custord/order_q_04.adm
index f22ea7f..3992f7d 100644
--- a/asterix-app/src/test/resources/runtimets/results/custord/order_q_04.adm
+++ b/asterix-app/src/test/resources/runtimets/results/custord/order_q_04.adm
@@ -1,4 +1,4 @@
-{ "orderid": 1000, "ordertot": 97.20656f, "list": [ [ "1.0f", "yassser" ], [ 11, 14, "yasir", 1.6f ], point("10.1,11.1"), line("10.1,11.1 10.2,11.2"), polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), null ], "item1": [ "1.0f", "yassser" ], "item1": [ "1.0f", "yassser" ], "item2": [ 11, 14, "yasir", 1.6f ], "item5": null, "item10": null }
-{ "orderid": 10, "ordertot": 7.206f, "list": [ [ 1.0f, "5.2f", "60" ], [ 13231, "foo", null, 13.25d, 13.2f ], point("10.1,11.1"), line("10.1,11.1 10.2,11.2"), polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), null ], "item1": [ 1.0f, "5.2f", "60" ], "item1": [ 1.0f, "5.2f", "60" ], "item2": [ 13231, "foo", null, 13.25d, 13.2f ], "item5": null, "item10": null }
-{ "orderid": 100, "ordertot": 124.26f, "list": [ [ 1.3f, 5.2f, "60", 12.32f ], [ 10, 2.0f, 3.0d, 40 ], point("10.1,11.1"), line("10.1,11.1 10.2,11.2"), polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), null ], "item1": [ 1.3f, 5.2f, "60", 12.32f ], "item1": [ 1.3f, 5.2f, "60", 12.32f ], "item2": [ 10, 2.0f, 3.0d, 40 ], "item5": null, "item10": null }
-{ "orderid": 10, "ordertot": 14.2326f, "list": [ [ 2.4f, "15" ], [ 110 ], point("10.1,11.1"), line("10.1,11.1 10.2,11.2"), polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), { "oid": 75, "total": 87.61863f } ], "item1": [ 2.4f, "15" ], "item1": [ 2.4f, "15" ], "item2": [ 110 ], "item5": { "oid": 75, "total": 87.61863f }, "item10": null }
+{ "orderid": 1000, "ordertot": 97.20656f, "list": [ [ "1.0f", "yassser" ], [ 11, 14, "yasir", 1.6f ], point("10.1,11.1"), line("10.1,11.1 10.2,11.2"), polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), null ], "item1": [ "1.0f", "yassser" ], "item2": [ 11, 14, "yasir", 1.6f ], "item5": null, "item10": null }
+{ "orderid": 10, "ordertot": 7.206f, "list": [ [ 1.0f, "5.2f", "60" ], [ 13231, "foo", null, 13.25d, 13.2f ], point("10.1,11.1"), line("10.1,11.1 10.2,11.2"), polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), null ], "item1": [ 1.0f, "5.2f", "60" ], "item2": [ 13231, "foo", null, 13.25d, 13.2f ], "item5": null, "item10": null }
+{ "orderid": 100, "ordertot": 124.26f, "list": [ [ 1.3f, 5.2f, "60", 12.32f ], [ 10, 2.0f, 3.0d, 40 ], point("10.1,11.1"), line("10.1,11.1 10.2,11.2"), polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), null ], "item1": [ 1.3f, 5.2f, "60", 12.32f ], "item2": [ 10, 2.0f, 3.0d, 40 ], "item5": null, "item10": null }
+{ "orderid": 10, "ordertot": 14.2326f, "list": [ [ 2.4f, "15" ], [ 110 ], point("10.1,11.1"), line("10.1,11.1 10.2,11.2"), polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), { "oid": 75, "total": 87.61863f } ], "item1": [ 2.4f, "15" ], "item2": [ 110 ], "item5": { "oid": 75, "total": 87.61863f }, "item10": null }
diff --git a/asterix-app/src/test/resources/runtimets/results/custord/order_q_05.adm b/asterix-app/src/test/resources/runtimets/results/custord/order_q_05.adm
index dd3420e..4d8b37c 100644
--- a/asterix-app/src/test/resources/runtimets/results/custord/order_q_05.adm
+++ b/asterix-app/src/test/resources/runtimets/results/custord/order_q_05.adm
@@ -1,4 +1,4 @@
-{ "orderid": 1000, "ordertot": 97.20656f, "emptyorderedlist": [  ], "emptyunorderedlist": {{  }}, "olist_item1": null, "olist_item1": null, "olist_item5": null, "ulist_item1": null }
-{ "orderid": 10, "ordertot": 7.206f, "emptyorderedlist": [  ], "emptyunorderedlist": {{  }}, "olist_item1": null, "olist_item1": null, "olist_item5": null, "ulist_item1": null }
-{ "orderid": 100, "ordertot": 124.26f, "emptyorderedlist": [  ], "emptyunorderedlist": {{  }}, "olist_item1": null, "olist_item1": null, "olist_item5": null, "ulist_item1": null }
-{ "orderid": 10, "ordertot": 14.2326f, "emptyorderedlist": [  ], "emptyunorderedlist": {{  }}, "olist_item1": null, "olist_item1": null, "olist_item5": null, "ulist_item1": null }
+{ "orderid": 1000, "ordertot": 97.20656f, "emptyorderedlist": [  ], "emptyunorderedlist": {{  }}, "olist_item1": null, "olist_item5": null, "ulist_item1": null }
+{ "orderid": 10, "ordertot": 7.206f, "emptyorderedlist": [  ], "emptyunorderedlist": {{  }}, "olist_item1": null, "olist_item5": null, "ulist_item1": null }
+{ "orderid": 100, "ordertot": 124.26f, "emptyorderedlist": [  ], "emptyunorderedlist": {{  }}, "olist_item1": null, "olist_item5": null, "ulist_item1": null }
+{ "orderid": 10, "ordertot": 14.2326f, "emptyorderedlist": [  ], "emptyunorderedlist": {{  }}, "olist_item1": null, "olist_item5": null, "ulist_item1": null }
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/create-drop-cltype.adm b/asterix-app/src/test/resources/runtimets/results/dml/create-drop-cltype.adm
new file mode 100644
index 0000000..e2f6676
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/dml/create-drop-cltype.adm
@@ -0,0 +1 @@
+{ "DataverseName": "test", "DatatypeName": "TestType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": false, "Fields": [ { "FieldName": "id", "FieldType": "int32" }, { "FieldName": "salary", "FieldType": "Field_salary_in_TestType" }, { "FieldName": "name", "FieldType": "string" }, { "FieldName": "durtn", "FieldType": "Field_durtn_in_TestType" }, { "FieldName": "inter", "FieldType": "interval" }, { "FieldName": "dt", "FieldType": "Field_dt_in_TestType" }, { "FieldName": "tm", "FieldType": "time" }, { "FieldName": "pt", "FieldType": "Field_pt_in_TestType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Feb 11 18:10:43 PST 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/create-drop-opntype.adm b/asterix-app/src/test/resources/runtimets/results/dml/create-drop-opntype.adm
new file mode 100644
index 0000000..8c0b451
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/dml/create-drop-opntype.adm
@@ -0,0 +1 @@
+{ "DataverseName": "test", "DatatypeName": "TestType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "id", "FieldType": "int32" }, { "FieldName": "salary", "FieldType": "Field_salary_in_TestType" }, { "FieldName": "name", "FieldType": "string" }, { "FieldName": "durtn", "FieldType": "Field_durtn_in_TestType" }, { "FieldName": "inter", "FieldType": "interval" }, { "FieldName": "dt", "FieldType": "Field_dt_in_TestType" }, { "FieldName": "tm", "FieldType": "time" }, { "FieldName": "pt", "FieldType": "Field_pt_in_TestType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Feb 11 18:12:10 PST 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/drop-empty-secondary-indexes.adm b/asterix-app/src/test/resources/runtimets/results/dml/drop-empty-secondary-indexes.adm
new file mode 100644
index 0000000..9b49756
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/dml/drop-empty-secondary-indexes.adm
@@ -0,0 +1,3 @@
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatatypeName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Fri Feb 08 17:57:01 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "GroupName", "IndexStructure": "BTREE", "SearchKey": [ "GroupName", "DataverseName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Fri Feb 08 17:57:01 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "NestedDatatypeName", "TopDatatypeName" ], "IsPrimary": false, "Timestamp": "Fri Feb 08 17:57:01 PST 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01.adm
index 17d8d1d..4e3714c 100644
--- a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01.adm
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01.adm
@@ -1 +1 @@
-{ "DataverseName": "feeds", "DatasetName": "TweetFeed", "DataTypeName": "TweetType", "DatasetType": "FEED", "InternalDetails": null, "ExternalDetails": null, "FeedDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "DatasourceAdapter": "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory", "Properties": [ { "Name": "output-type-name", "Value": "TweetType" }, { "Name": "fs", "Value": "localfs" }, { "Name": "path", "Value": "nc1://data/twitter/obamatweets.adm" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" } ], "Function": null, "Status": "INACTIVE" }, "Timestamp": "Mon Dec 24 13:51:31 PST 2012" }
+{ "DataverseName": "feeds", "DatasetName": "TweetFeed", "DataTypeName": "TweetType", "DatasetType": "FEED", "InternalDetails": null, "ExternalDetails": null, "FeedDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "DatasourceAdapter": "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory", "Properties": [ { "Name": "output-type-name", "Value": "TweetType" }, { "Name": "fs", "Value": "localfs" }, { "Name": "path", "Value": "nc1://data/twitter/obamatweets.adm" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" } ], "Function": null, "Status": "INACTIVE" }, "Hints": {{  }}, "Timestamp": "Tue Jan 29 19:07:24 PST 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03.adm
index 2fd80d983..8011e4b 100644
--- a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03.adm
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03.adm
@@ -1 +1 @@
-{ "DataverseName": "feeds", "DatasetName": "TweetFeed", "DataTypeName": "TweetType", "DatasetType": "FEED", "InternalDetails": null, "ExternalDetails": null, "FeedDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "DatasourceAdapter": "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory", "Properties": [ { "Name": "output-type-name", "Value": "TweetType" }, { "Name": "fs", "Value": "localfs" }, { "Name": "path", "Value": "nc1://data/twitter/obamatweets.adm" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" } ], "Function": "feeds.feed_processor@1", "Status": "INACTIVE" }, "Timestamp": "Mon Dec 24 13:49:20 PST 2012" }
+{ "DataverseName": "feeds", "DatasetName": "TweetFeed", "DataTypeName": "TweetType", "DatasetType": "FEED", "InternalDetails": null, "ExternalDetails": null, "FeedDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "DatasourceAdapter": "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory", "Properties": [ { "Name": "output-type-name", "Value": "TweetType" }, { "Name": "fs", "Value": "localfs" }, { "Name": "path", "Value": "nc1://data/twitter/obamatweets.adm" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" } ], "Function": "feeds.feed_processor@1", "Status": "INACTIVE" }, "Hints": {{  }}, "Timestamp": "Tue Jan 29 19:08:49 PST 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/results/hints/issue_251_dataset_hint_5.adm b/asterix-app/src/test/resources/runtimets/results/hints/issue_251_dataset_hint_5.adm
new file mode 100644
index 0000000..a7ec8f6
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/hints/issue_251_dataset_hint_5.adm
@@ -0,0 +1,100 @@
+{ "id": 1, "dblpid": "books/acm/kim95/AnnevelinkACFHK95", "title": "Object SQL - A Language for the Design and Implementation of Object Databases.", "authors": "Jurgen Annevelink Rafiul Ahad Amelia Carlson Daniel H. Fishman Michael L. Heytens William Kent", "misc": "2002-01-03 42-68 1995 Modern Database Systems db/books/collections/kim95.html#AnnevelinkACFHK95" }
+{ "id": 2, "dblpid": "books/acm/kim95/Blakeley95", "title": "OQL[C++]  Extending C++ with an Object Query Capability.", "authors": "José A. Blakeley", "misc": "2002-01-03 69-88 Modern Database Systems db/books/collections/kim95.html#Blakeley95 1995" }
+{ "id": 3, "dblpid": "books/acm/kim95/BreitbartGS95", "title": "Transaction Management in Multidatabase Systems.", "authors": "Yuri Breitbart Hector Garcia-Molina Abraham Silberschatz", "misc": "2004-03-08 573-591 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#BreitbartGS95 1995" }
+{ "id": 4, "dblpid": "books/acm/kim95/ChristodoulakisK95", "title": "Multimedia Information Systems  Issues and Approaches.", "authors": "Stavros Christodoulakis Leonidas Koveos", "misc": "2002-01-03 318-337 1995 Modern Database Systems db/books/collections/kim95.html#ChristodoulakisK95" }
+{ "id": 5, "dblpid": "books/acm/kim95/DayalHW95", "title": "Active Database Systems.", "authors": "Umeshwar Dayal Eric N. Hanson Jennifer Widom", "misc": "2002-01-03 434-456 1995 Modern Database Systems db/books/collections/kim95.html#DayalHW95" }
+{ "id": 6, "dblpid": "books/acm/kim95/DittrichD95", "title": "Where Object-Oriented DBMSs Should Do Better  A Critique Based on Early Experiences.", "authors": "Angelika Kotz Dittrich Klaus R. Dittrich", "misc": "2002-01-03 238-254 1995 Modern Database Systems db/books/collections/kim95.html#DittrichD95" }
+{ "id": 7, "dblpid": "books/acm/kim95/Garcia-MolinaH95", "title": "Distributed Databases.", "authors": "Hector Garcia-Molina Meichun Hsu", "misc": "2002-01-03 477-493 1995 Modern Database Systems db/books/collections/kim95.html#Garcia-MolinaH95" }
+{ "id": 8, "dblpid": "books/acm/kim95/Goodman95", "title": "An Object-Oriented DBMS War Story  Developing a Genome Mapping Database in C++.", "authors": "Nathan Goodman", "misc": "2002-01-03 216-237 1995 Modern Database Systems db/books/collections/kim95.html#Goodman95" }
+{ "id": 9, "dblpid": "books/acm/kim95/Kaiser95", "title": "Cooperative Transactions for Multiuser Environments.", "authors": "Gail E. Kaiser", "misc": "2002-01-03 409-433 1995 Modern Database Systems db/books/collections/kim95.html#Kaiser95" }
+{ "id": 10, "dblpid": "books/acm/kim95/KelleyGKRG95", "title": "Schema Architecture of the UniSQL/M Multidatabase System", "authors": "William Kelley Sunit K. Gala Won Kim Tom C. Reyes Bruce Graham", "misc": "2004-03-08 Modern Database Systems books/acm/Kim95 621-648 1995 db/books/collections/kim95.html#KelleyGKRG95" }
+{ "id": 11, "dblpid": "books/acm/kim95/KemperM95", "title": "Physical Object Management.", "authors": "Alfons Kemper Guido Moerkotte", "misc": "2002-01-03 175-202 1995 Modern Database Systems db/books/collections/kim95.html#KemperM95" }
+{ "id": 12, "dblpid": "books/acm/kim95/Kim95", "title": "Introduction to Part 1  Next-Generation Database Technology.", "authors": "Won Kim", "misc": "2002-01-03 5-17 1995 Modern Database Systems db/books/collections/kim95.html#Kim95" }
+{ "id": 13, "dblpid": "books/acm/kim95/Kim95a", "title": "Object-Oriented Database Systems  Promises, Reality, and Future.", "authors": "Won Kim", "misc": "2002-01-03 255-280 1995 Modern Database Systems db/books/collections/kim95.html#Kim95a" }
+{ "id": 14, "dblpid": "books/acm/kim95/Kim95b", "title": "Introduction to Part 2  Technology for Interoperating Legacy Databases.", "authors": "Won Kim", "misc": "2002-01-03 515-520 1995 Modern Database Systems db/books/collections/kim95.html#Kim95b" }
+{ "id": 15, "dblpid": "books/acm/kim95/KimCGS95", "title": "On Resolving Schematic Heterogeneity in Multidatabase Systems.", "authors": "Won Kim Injun Choi Sunit K. Gala Mark Scheevel", "misc": "2002-01-03 521-550 1995 Modern Database Systems db/books/collections/kim95.html#KimCGS95" }
+{ "id": 16, "dblpid": "books/acm/kim95/KimG95", "title": "Requirements for a Performance Benchmark for Object-Oriented Database Systems.", "authors": "Won Kim Jorge F. Garza", "misc": "2002-01-03 203-215 1995 Modern Database Systems db/books/collections/kim95.html#KimG95" }
+{ "id": 17, "dblpid": "books/acm/kim95/KimK95", "title": "On View Support in Object-Oriented Databases Systems.", "authors": "Won Kim William Kelley", "misc": "2002-01-03 108-129 1995 Modern Database Systems db/books/collections/kim95.html#KimK95" }
+{ "id": 18, "dblpid": "books/acm/kim95/Kowalski95", "title": "The POSC Solution to Managing E&P Data.", "authors": "Vincent J. Kowalski", "misc": "2002-01-03 281-301 1995 Modern Database Systems db/books/collections/kim95.html#Kowalski95" }
+{ "id": 19, "dblpid": "books/acm/kim95/KriegerA95", "title": "C++ Bindings to an Object Database.", "authors": "David Krieger Tim Andrews", "misc": "2002-01-03 89-107 1995 Modern Database Systems db/books/collections/kim95.html#KriegerA95" }
+{ "id": 20, "dblpid": "books/acm/kim95/Lunt95", "title": "Authorization in Object-Oriented Databases.", "authors": "Teresa F. Lunt", "misc": "2002-01-03 130-145 1995 Modern Database Systems db/books/collections/kim95.html#Lunt95" }
+{ "id": 21, "dblpid": "books/acm/kim95/MengY95", "title": "Query Processing in Multidatabase Systems.", "authors": "Weiyi Meng Clement T. Yu", "misc": "2002-01-03 551-572 1995 Modern Database Systems db/books/collections/kim95.html#MengY95" }
+{ "id": 22, "dblpid": "books/acm/kim95/Motro95", "title": "Management of Uncerainty in database Systems.", "authors": "Amihai Motro", "misc": "2002-01-03 457-476 1995 Modern Database Systems db/books/collections/kim95.html#Motro95" }
+{ "id": 23, "dblpid": "books/acm/kim95/Omiecinski95", "title": "Parallel Relational Database Systems.", "authors": "Edward Omiecinski", "misc": "2002-01-03 494-512 1995 Modern Database Systems db/books/collections/kim95.html#Omiecinski95" }
+{ "id": 24, "dblpid": "books/acm/kim95/OzsuB95", "title": "Query Processing in Object-Oriented Database Systems.", "authors": "M. Tamer Özsu José A. Blakeley", "misc": "2002-01-03 146-174 1995 Modern Database Systems db/books/collections/kim95.html#OzsuB95" }
+{ "id": 25, "dblpid": "books/acm/kim95/RusinkiewiczS95", "title": "Specification and Execution of Transactional Workflows.", "authors": "Marek Rusinkiewicz Amit P. Sheth", "misc": "2004-03-08 592-620 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#RusinkiewiczS95 1995" }
+{ "id": 26, "dblpid": "books/acm/kim95/Samet95", "title": "Spatial Data Structures.", "authors": "Hanan Samet", "misc": "2004-03-08 361-385 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#Samet95 1995" }
+{ "id": 27, "dblpid": "books/acm/kim95/SametA95", "title": "Spatial Data Models and Query Processing.", "authors": "Hanan Samet Walid G. Aref", "misc": "2002-01-03 338-360 1995 Modern Database Systems db/books/collections/kim95.html#SametA95" }
+{ "id": 28, "dblpid": "books/acm/kim95/ShanADDK95", "title": "Pegasus  A Heterogeneous Information Management System.", "authors": "Ming-Chien Shan Rafi Ahmed Jim Davis Weimin Du William Kent", "misc": "2004-03-08 664-682 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#ShanADDK95 1995" }
+{ "id": 29, "dblpid": "books/acm/kim95/Snodgrass95", "title": "Temporal Object-Oriented Databases  A Critical Comparison.", "authors": "Richard T. Snodgrass", "misc": "2002-01-03 386-408 1995 Modern Database Systems db/books/collections/kim95.html#Snodgrass95" }
+{ "id": 30, "dblpid": "books/acm/kim95/SoleyK95", "title": "The OMG Object Model.", "authors": "Richard Mark Soley William Kent", "misc": "2002-01-03 18-41 1995 Modern Database Systems db/books/collections/kim95.html#SoleyK95" }
+{ "id": 31, "dblpid": "books/acm/kim95/Stout95", "title": "EDA/SQL.", "authors": "Ralph L. Stout", "misc": "2004-03-08 649-663 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#Stout95 1995" }
+{ "id": 32, "dblpid": "books/acm/kim95/Thompson95", "title": "The Changing Database Standards Landscape.", "authors": "Craig W. Thompson", "misc": "2002-01-03 302-317 1995 Modern Database Systems db/books/collections/kim95.html#Thompson95" }
+{ "id": 33, "dblpid": "books/acm/kim95/BreitbartR95", "title": "Overview of the ADDS System.", "authors": "Yuri Breitbart Tom C. Reyes", "misc": "2009-06-12 683-701 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#BreitbartR95 1995" }
+{ "id": 34, "dblpid": "books/acm/Kim95", "title": "Modern Database Systems  The Object Model, Interoperability, and Beyond.", "authors": "", "misc": "2004-03-08 Won Kim Modern Database Systems ACM Press and Addison-Wesley 1995 0-201-59098-0 db/books/collections/kim95.html" }
+{ "id": 35, "dblpid": "books/ap/MarshallO79", "title": "Inequalities  Theory of Majorization and Its Application.", "authors": "Albert W. Marshall Ingram Olkin", "misc": "2002-01-03 Academic Press 1979 0-12-473750-1" }
+{ "id": 36, "dblpid": "books/aw/kimL89/BjornerstedtH89", "title": "Version Control in an Object-Oriented Architecture.", "authors": "Anders Björnerstedt Christer Hulten", "misc": "2006-02-24 451-485 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#BjornerstedtH89" }
+{ "id": 37, "dblpid": "books/aw/kimL89/BretlMOPSSWW89", "title": "The GemStone Data Management System.", "authors": "Robert Bretl David Maier Allen Otis D. Jason Penney Bruce Schuchardt Jacob Stein E. Harold Williams Monty Williams", "misc": "2002-01-03 283-308 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#BretlMOPSSWW89" }
+{ "id": 38, "dblpid": "books/aw/kimL89/CareyDRS89", "title": "Storage Management in EXODUS.", "authors": "Michael J. Carey David J. DeWitt Joel E. Richardson Eugene J. Shekita", "misc": "2002-01-03 341-369 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#CareyDRS89" }
+{ "id": 39, "dblpid": "books/aw/kimL89/Decouchant89", "title": "A Distributed Object Manager for the Smalltalk-80 System.", "authors": "Dominique Decouchant", "misc": "2002-01-03 487-520 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Decouchant89" }
+{ "id": 40, "dblpid": "books/aw/kimL89/DiederichM89", "title": "Objects, Messages, and Rules in Database Design.", "authors": "Jim Diederich Jack Milton", "misc": "2002-01-03 177-197 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#DiederichM89" }
+{ "id": 41, "dblpid": "books/aw/kimL89/EllisG89", "title": "Active Objects  Ealities and Possibilities.", "authors": "Clarence A. Ellis Simon J. Gibbs", "misc": "2002-01-03 561-572 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#EllisG89" }
+{ "id": 42, "dblpid": "books/aw/kimL89/FishmanABCCDHHKLLMNRSW89", "title": "Overview of the Iris DBMS.", "authors": "Daniel H. Fishman Jurgen Annevelink David Beech E. C. Chow Tim Connors J. W. Davis Waqar Hasan C. G. Hoch William Kent S. Leichner Peter Lyngbæk Brom Mahbod Marie-Anne Neimat Tore Risch Ming-Chien Shan W. Kevin Wilkinson", "misc": "2002-01-03 219-250 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#FishmanABCCDHHKLLMNRSW89" }
+{ "id": 43, "dblpid": "books/aw/kimL89/KimBCGW89", "title": "Features of the ORION Object-Oriented Database System.", "authors": "Won Kim Nat Ballou Hong-Tai Chou Jorge F. Garza Darrell Woelk", "misc": "2002-01-03 251-282 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#KimBCGW89" }
+{ "id": 44, "dblpid": "books/aw/kimL89/KimKD89", "title": "Indexing Techniques for Object-Oriented Databases.", "authors": "Won Kim Kyung-Chang Kim Alfred G. Dale", "misc": "2002-01-03 371-394 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#KimKD89" }
+{ "id": 45, "dblpid": "books/aw/kimL89/King89", "title": "My Cat Is Object-Oriented.", "authors": "Roger King", "misc": "2002-01-03 23-30 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#King89" }
+{ "id": 46, "dblpid": "books/aw/kimL89/Maier89", "title": "Making Database Systems Fast Enough for CAD Applications.", "authors": "David Maier", "misc": "2002-01-03 573-582 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Maier89" }
+{ "id": 47, "dblpid": "books/aw/kimL89/MellenderRS89", "title": "Optimizing Smalltalk Message Performance.", "authors": "Fred Mellender Steve Riegel Andrew Straw", "misc": "2002-01-03 423-450 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#MellenderRS89" }
+{ "id": 48, "dblpid": "books/aw/kimL89/Moon89", "title": "The Common List Object-Oriented Programming Language Standard.", "authors": "David A. Moon", "misc": "2002-01-03 49-78 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Moon89" }
+{ "id": 49, "dblpid": "books/aw/kimL89/Moss89", "title": "Object Orientation as Catalyst for Language-Database Inegration.", "authors": "J. Eliot B. Moss", "misc": "2002-01-03 583-592 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Moss89" }
+{ "id": 50, "dblpid": "books/aw/kimL89/Nierstrasz89", "title": "A Survey of Object-Oriented Concepts.", "authors": "Oscar Nierstrasz", "misc": "2002-01-03 3-21 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Nierstrasz89" }
+{ "id": 51, "dblpid": "books/aw/kimL89/NierstraszT89", "title": "Integrated Office Systems.", "authors": "Oscar Nierstrasz Dennis Tsichritzis", "misc": "2002-01-03 199-215 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#NierstraszT89" }
+{ "id": 52, "dblpid": "books/aw/kimL89/Russinoff89", "title": "Proteus  A Frame-Based Nonmonotonic Inference System.", "authors": "David M. Russinoff", "misc": "2002-01-03 127-150 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#Russinoff89" }
+{ "id": 53, "dblpid": "books/aw/kimL89/SkarraZ89", "title": "Concurrency Control and Object-Oriented Databases.", "authors": "Andrea H. Skarra Stanley B. Zdonik", "misc": "2002-01-03 395-421 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#SkarraZ89" }
+{ "id": 54, "dblpid": "books/aw/kimL89/SteinLU89", "title": "A Shared View of Sharing  The Treaty of Orlando.", "authors": "Lynn Andrea Stein Henry Lieberman David Ungar", "misc": "2002-01-03 31-48 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#SteinLU89" }
+{ "id": 55, "dblpid": "books/aw/kimL89/TarltonT89", "title": "Pogo  A Declarative Representation System for Graphics.", "authors": "Mark A. Tarlton P. Nong Tarlton", "misc": "2002-01-03 151-176 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TarltonT89" }
+{ "id": 56, "dblpid": "books/aw/kimL89/TomlinsonS89", "title": "Concurrent Object-Oriented Programming Languages.", "authors": "Chris Tomlinson Mark Scheevel", "misc": "2002-01-03 79-124 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TomlinsonS89" }
+{ "id": 57, "dblpid": "books/aw/kimL89/TsichritzisN89", "title": "Directions in Object-Oriented Research.", "authors": "Dennis Tsichritzis Oscar Nierstrasz", "misc": "2002-01-03 523-536 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TsichritzisN89" }
+{ "id": 58, "dblpid": "books/aw/kimL89/Wand89", "title": "A Proposal for a Formal Model of Objects.", "authors": "Yair Wand", "misc": "2002-01-03 537-559 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Wand89" }
+{ "id": 59, "dblpid": "books/aw/kimL89/WeiserL89", "title": "OZ+  An Object-Oriented Database System.", "authors": "Stephen P. Weiser Frederick H. Lochovsky", "misc": "2002-01-03 309-337 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#WeiserL89" }
+{ "id": 60, "dblpid": "books/aw/stonebraker86/RoweS86", "title": "The Commercial INGRES Epilogue.", "authors": "Lawrence A. Rowe Michael Stonebraker", "misc": "2002-01-03 63-82 1986 The INGRES Papers db/books/collections/Stonebraker86.html#RoweS86 db/books/collections/Stonebraker86/RoweS86.html ingres/P063.pdf" }
+{ "id": 61, "dblpid": "books/aw/stonebraker86/Stonebraker86", "title": "Design of Relational Systems (Introduction to Section 1).", "authors": "Michael Stonebraker", "misc": "2002-01-03 1-3 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86 db/books/collections/Stonebraker86/Stonebraker86.html ingres/P001.pdf" }
+{ "id": 62, "dblpid": "books/aw/stonebraker86/Stonebraker86a", "title": "Supporting Studies on Relational Systems (Introduction to Section 2).", "authors": "Michael Stonebraker", "misc": "2002-01-03 83-85 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86a db/books/collections/Stonebraker86/Stonebraker86a.html ingres/P083.pdf" }
+{ "id": 63, "dblpid": "books/aw/stonebraker86/Stonebraker86b", "title": "Distributed Database Systems (Introduction to Section 3).", "authors": "Michael Stonebraker", "misc": "2002-01-03 183-186 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86b db/books/collections/Stonebraker86/Stonebraker86b.html ingres/P183.pdf" }
+{ "id": 64, "dblpid": "books/aw/stonebraker86/Stonebraker86c", "title": "The Design and Implementation of Distributed INGRES.", "authors": "Michael Stonebraker", "misc": "2002-01-03 187-196 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86c db/books/collections/Stonebraker86/Stonebraker86c.html ingres/P187.pdf" }
+{ "id": 65, "dblpid": "books/aw/stonebraker86/Stonebraker86d", "title": "User Interfaces for Database Systems (Introduction to Section 4).", "authors": "Michael Stonebraker", "misc": "2002-01-03 243-245 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86d db/books/collections/Stonebraker86/Stonebraker86d.html ingres/P243.pdf" }
+{ "id": 66, "dblpid": "books/aw/stonebraker86/Stonebraker86e", "title": "Extended Semantics for the Relational Model (Introduction to Section 5).", "authors": "Michael Stonebraker", "misc": "2002-01-03 313-316 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86e db/books/collections/Stonebraker86/Stonebraker86e.html ingres/P313.pdf" }
+{ "id": 67, "dblpid": "books/aw/stonebraker86/Stonebraker86f", "title": "Database Design (Introduction to Section 6).", "authors": "Michael Stonebraker", "misc": "2002-01-03 393-394 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86f db/books/collections/Stonebraker86/Stonebraker86f.html ingres/P393.pdf" }
+{ "id": 68, "dblpid": "books/aw/stonebraker86/X86", "title": "Title, Preface, Contents.", "authors": "", "misc": "2002-01-03 1986 The INGRES Papers db/books/collections/Stonebraker86.html#X86 db/books/collections/Stonebraker86/X86.html ingres/frontmatter.pdf" }
+{ "id": 69, "dblpid": "books/aw/stonebraker86/X86a", "title": "References.", "authors": "", "misc": "2002-01-03 429-444 1986 The INGRES Papers db/books/collections/Stonebraker86.html#X86a db/books/collections/Stonebraker86/X86a.html ingres/P429.pdf" }
+{ "id": 70, "dblpid": "books/aw/Knuth86a", "title": "TeX  The Program", "authors": "Donald E. Knuth", "misc": "2002-01-03 Addison-Wesley 1986 0-201-13437-3" }
+{ "id": 71, "dblpid": "books/aw/AbiteboulHV95", "title": "Foundations of Databases.", "authors": "Serge Abiteboul Richard Hull Victor Vianu", "misc": "2002-01-03 Addison-Wesley 1995 0-201-53771-0 AHV/Toc.pdf ... ... journals/tods/AstrahanBCEGGKLMMPTWW76 books/bc/AtzeniA93 journals/tcs/AtzeniABM82 journals/jcss/AbiteboulB86 journals/csur/AtkinsonB87 conf/pods/AtzeniB87 journals/vldb/AbiteboulB95 conf/sigmod/AbiteboulB91 conf/dood/AtkinsonBDDMZ89 conf/vldb/AlbanoBGO93 ... conf/icdt/Abiteboul88 journals/ipl/Abiteboul89 conf/ds/Abrial74 journals/tods/AhoBU79 books/mk/minker88/AptBW88 conf/vldb/AroraC78 conf/stoc/AfratiC89 journals/tods/AlbanoCO85 conf/pods/AfratiCY91 conf/pods/AusielloDM85 conf/vldb/AbiteboulG85 journals/jacm/AjtaiG87 conf/focs/AjtaiG89 journals/tods/AbiteboulG91 ... ... journals/tods/AbiteboulH87 conf/sigmod/AbiteboulH88 ... conf/sigmod/AbiteboulK89 journals/tcs/AbiteboulKG91 journals/jcss/AbiteboulKRW95 conf/sigmod/AbiteboulLUW93 conf/pods/AtzeniP82 conf/pods/AfratiP87 conf/pods/AptP87 conf/wg/AndriesP91 conf/pods/AfratiPPRSU86 books/el/leeuwen90/Apt90 conf/ifip/Armstrong74 journals/siamcomp/AhoSSU81 journals/tods/AhoSU79 journals/siamcomp/AhoSU79 conf/pods/AbiteboulSV90 journals/is/AtzeniT93 conf/popl/AhoU79 conf/pods/AbiteboulV87 conf/jcdkb/AbiteboulV88 journals/jacm/AbiteboulV88 conf/pods/AbiteboulV88 journals/jacm/AbiteboulV89 journals/jcss/AbiteboulV90 journals/jcss/AbiteboulV91 conf/stoc/AbiteboulV91 journals/amai/AbiteboulV91 journals/jcss/AbiteboulV95 journals/jacm/AptE82 conf/coco/AbiteboulVV92 conf/iclp/AptB88 conf/oopsla/BobrowKKMSZ86 journals/tse/BatoryBGSTTW88 conf/mfcs/Bancilhon78 ... conf/db-workshops/Bancilhon85 books/el/leeuwen90/Barendregt90 ... journals/tods/BeeriB79 books/el/leeuwen90/BerstelB90 conf/icdt/BeneventanoB92 conf/vldb/BernsteinBC80 conf/vldb/BeeriBG78 conf/sigmod/BorgidaBMR89 journals/tods/BunemanC79 journals/jacm/BernsteinC81 conf/dbpl/BancilhonCD89 books/bc/tanselCGSS93/BaudinetCW93 conf/sigmod/BiskupDB79 journals/jacm/BeeriDFS84 books/mk/BancilhonDK92 conf/edbt/BryDM88 conf/pods/BunemanDW88 journals/jcss/BunemanDW91 journals/tods/Beeri80 journals/dke/Beeri90 ... journals/tods/Bernstein76 conf/lics/BidoitF87 journals/iandc/BidoitF91 conf/sigmod/BeeriFH77 conf/stoc/BeeriFMMUY81 journals/jacm/BeeriFMY83 journals/tods/BunemanFN82 journals/siamcomp/BernsteinG81 journals/iandc/BlassGK85 conf/ijcai/BrachmanGL85 journals/tods/BernsteinGWRR81 books/aw/BernsteinHG87 ... journals/tcs/Bidoit91 journals/tcs/Biskup80 conf/adbt/Biskup79 journals/tods/Biskup83 journals/tcs/BunemanJO91 journals/tods/BeeriK86 conf/pods/BeeriKBR87 conf/icdt/BidoitL90 journals/csur/BatiniL86 conf/sigmod/BlakeleyLT86 conf/vldb/BeeriM91 conf/sigmod/BlakeleyMG93 journals/siamcomp/BeeriMSU81 conf/pods/BancilhonMSU86 conf/pods/BeeriNRST87 journals/software/Borgida85 conf/icalp/BraP83 conf/fgcs/BalbinMR88 ... conf/pods/BeeriR87 journals/jlp/BalbinR87 conf/sigmod/BancilhonR86 books/mk/minker88/BancilhonR88 journals/jlp/BeeriR91 conf/vldb/BancilhonRS82 conf/pods/BeeriRSS92 conf/dood/Bry89 journals/tods/BancilhonS81 journals/cogsci/BrachmanS85 journals/tods/BergamaschiS92 conf/sigmod/BernsteinST75 conf/dbpl/TannenBN91 conf/icdt/TannenBW92 ... journals/jacm/BeeriV84 conf/icalp/BeeriV81 conf/adbt/BeeriV79 journals/siamcomp/BeeriV84 journals/iandc/BeeriV84 journals/jacm/BeeriV84 journals/tcs/BeeriV85 journals/ibmrd/ChamberlinAEGLMRW76 ... 
journals/iandc/Cardelli88 books/mk/Cattell94 conf/sigmod/CacaceCCTZ90 conf/vldb/CastilhoCF82 conf/adbt/CasanovaF82 conf/focs/CaiFI89 journals/jcss/CasanovaFP84 conf/stoc/CosmadakisGKV88 conf/dood/CorciuloGP93 books/sp/CeriGT90 conf/focs/ChandraH80 journals/jcss/ChandraH80 journals/jcss/ChandraH82 journals/jlp/ChandraH85 conf/popl/Chandra81 conf/adbt/Chang79 conf/pods/Chandra88 ... journals/tods/Chen76 conf/ride/ChenHM94 conf/icde/Chomicki92 conf/pods/Chomicki92 ... ... ... conf/stoc/CosmadakisK85 journals/acr/CosmadakisK86 ... journals/jcss/CosmadakisKS86 journals/jacm/CosmadakisKV90 ... conf/pods/CalvaneseL94 conf/adbt/Clark77 conf/stoc/ChandraLM81 conf/stoc/ChandraM77 conf/pods/ConsensM90 conf/sigmod/ConsensM93 conf/icdt/ConsensM90 journals/cacm/Codd70 conf/sigmod/Codd71a persons/Codd71a persons/Codd72 conf/ifip/Codd74 ... conf/sigmod/Codd79 journals/cacm/Codd82 ... conf/sigmod/Cohen89 journals/cacm/Cohen90 ... journals/jcss/Cook74 conf/pods/Cosmadakis83 conf/focs/Cosmadakis87 books/el/leeuwen90/Courcelle90a journals/jacm/CosmadakisP84 conf/edbt/CeriCGLLTZ88 ... conf/vldb/CeriT87 conf/vldb/CasanovaTF88 ... conf/pods/CasanovaV83 journals/siamcomp/ChandraV85 conf/pods/ChaudhuriV92 conf/pods/ChaudhuriV93 conf/pods/ChaudhuriV94 journals/csur/CardelliW85 conf/pods/ChenW89 conf/pods/CohenW89 conf/vldb/CeriW90 conf/vldb/CeriW91 conf/iclp/ChenW92 conf/vldb/CeriW93 ... conf/birthday/Dahlhaus87 conf/vldb/Date81 books/aw/Date86 ... conf/dbpl/Dayal89 journals/tods/DayalB82 journals/ibmrd/DelobelC73 conf/icde/DelcambreD89 ... journals/tods/Delobel78 journals/jacm/Demolombe92 journals/tods/DateF92 ... conf/vldb/DayalHL91 journals/jacm/Paola69a conf/caap/DahlhausM86 journals/acr/DAtriM86 journals/iandc/DahlhausM92 conf/sigmod/DerrMP93 conf/vldb/MaindrevilleS88 conf/pods/Dong92 conf/adbt/BraP82 ... conf/dbpl/DongS91 journals/iandc/DongS95 conf/dbpl/DongS93 conf/dbpl/DongS93 conf/icdt/DongT92 conf/vldb/DenninghoffV91 conf/pods/DenninghoffV93 ... ... books/acm/kim95/DayalHW95 ... conf/pods/EiterGM94 conf/pods/Escobar-MolanoHJ93 ... books/el/leeuwen90/Emerson90 books/bc/ElmasriN89 ... conf/icse/Eswaran76 conf/sigmod/EpsteinSW78 ... ... conf/vldb/Fagin77 journals/tods/Fagin77 conf/sigmod/Fagin79 journals/tods/Fagin81 journals/ipl/FaginV83 journals/jacm/Fagin82 journals/jacm/Fagin83 journals/tcs/Fagin93 books/sp/kimrb85/FurtadoC85 ... journals/jlp/Fitting85a journals/tcs/FischerJT83 journals/acr/FaginKUV86 conf/icdt/FernandezM92 journals/tods/FaginMU82 conf/vldb/FaloutsosNS91 ... journals/ai/Forgy82 ... conf/sigmod/Freytag87 ... journals/siamcomp/FischerT83 journals/siamcomp/FaginMUY83 conf/pods/FaginUV83 conf/icalp/FaginV84 ... ... ... ... conf/sigmod/GraefeD87 conf/ride/GatziuD94 conf/sigmod/GardarinM86 conf/sigmod/GyssensG88 journals/tcs/GinsburgH83a journals/jacm/GinsburgH86 ... books/bc/tanselCGSS93/Ginsburg93 books/fm/GareyJ79 journals/jacm/GrantJ82 conf/vldb/GehaniJ91 conf/vldb/GhandeharizadehHJCELLTZ93 journals/tods/GhandeharizadehHJ96 conf/vldb/GehaniJS92 ... conf/sigmod/GehaniJS92 ... conf/deductive/GuptaKM92 conf/pods/GurevichL82 conf/iclp/GelfondL88 conf/adbt/77 journals/csur/GallaireMN84 conf/pods/GrahneMR92 conf/sigmod/GuptaMS93 conf/lics/GaifmanMSV87 journals/jacm/GaifmanMSV93 journals/jacm/GrahamMV86 conf/csl/GradelO92 ... conf/pods/Gottlob87 conf/pods/GyssensPG90 conf/dood/GiannottiPSZ91 books/aw/GoldbergR83 journals/acr/GrahneR86 journals/ipl/Grant77 ... journals/iandc/Grandjean83 conf/vldb/Grahne84 ... 
journals/csur/Graefe93 books/sp/Greibach75 journals/tods/GoodmanS82 journals/jcss/GoodmanS84 conf/focs/GurevichS85 ... conf/pods/GrumbachS94 conf/sigmod/GangulyST90 ... journals/tcs/Gunter92 ... ... ... ... conf/pods/GrahamV84 conf/pods/GrumbachV91 conf/icde/GardarinV92 conf/sigmod/GraefeW89 ... journals/jacm/GinsburgZ82 conf/vldb/GottlobZ88 ... ... journals/sigmod/Hanson89 ... journals/cacm/Harel80 journals/tkde/HaasCLMWLLPCS90 conf/lics/Hella92 journals/iandc/Herrmann95 conf/pods/HirstH93 conf/vldb/HullJ91 conf/ewdw/HullJ90 journals/csur/HullK87 journals/tods/HudsonK89 conf/lics/HillebrandKM93 conf/nato/HillebrandKR93 conf/jcdkb/HsuLM88 journals/ipl/HoneymanLY80 journals/tods/HammerM81 conf/adbt/HenschenMN82 ... journals/jacm/HenschenN84 journals/jacm/Honeyman82 conf/sigmod/HullS89 conf/pods/HullS89 journals/acta/HullS94 journals/jcss/HullS93 conf/fodo/HullTY89 journals/jcss/Hull83 journals/jacm/Hull84 journals/tcs/Hull85 journals/siamcomp/Hull86 ... conf/vldb/Hulin89 ... journals/jacm/HullY84 conf/vldb/HullY90 conf/pods/HullY91 conf/sigmod/IoannidisK90 journals/jcss/ImielinskiL84 conf/adbt/Imielinski82 journals/jcss/Immerman82 journals/iandc/Immerman86 ... journals/siamcomp/Immerman87 conf/pods/ImielinskiN88 conf/vldb/IoannidisNSS92 conf/sigmod/ImielinskiNV91 conf/dood/ImielinskiNV91 conf/vldb/Ioannidis85 journals/jacm/Jacobs82 conf/dbpl/JacobsH91 journals/csur/JarkeK84 journals/jcss/JohnsonK84 conf/popl/JaffarL87 books/el/leeuwen90/Johnson90 journals/jacm/Joyner76 conf/pods/JaeschkeS82 ... books/mk/minker88/Kanellakis88 books/el/leeuwen90/Kanellakis90 conf/oopsla/KhoshafianC86 conf/edbt/KotzDM88 conf/jcdkb/Keller82 conf/pods/Keller85 journals/computer/Keller86 ... journals/tods/Kent79 ... journals/ngc/RohmerLK86 conf/tacs/KanellakisG94 conf/jcdkb/Kifer88 conf/pods/KanellakisKR90 conf/sigmod/KiferKS92 ... conf/icdt/KiferL86 books/aw/KimL89 ... journals/tods/Klug80 journals/jacm/Klug82 journals/jacm/Klug88 journals/jacm/KiferLW95 conf/kr/KatsunoM91 journals/ai/KatsunoM92 conf/jcdkb/KrishnamurthyN88 journals/csur/Knight89 ... journals/iandc/Kolaitis91 journals/ai/Konolige88 conf/ifip/Kowalski74 journals/jacm/Kowalski75 conf/bncod/Kowalski84 conf/vldb/KoenigP81 journals/tods/KlugP82 ... conf/pods/KolaitisP88 conf/pods/KiferRS88 conf/sigmod/KrishnamurthyRS88 books/mg/SilberschatzK91 conf/iclp/KempT88 conf/sigmod/KellerU84 conf/dood/Kuchenhoff91 ... journals/jlp/Kunen87 conf/iclp/Kunen88 conf/pods/Kuper87 conf/pods/Kuper88 conf/ppcp/Kuper93 conf/pods/KuperV84 conf/stoc/KolaitisV87 journals/tcs/KarabegV90 journals/iandc/KolaitisV90 conf/pods/KolaitisV90 journals/tods/KarabegV91 journals/iandc/KolaitisV92 journals/tcs/KuperV93 journals/tods/KuperV93 journals/tse/KellerW85 conf/pods/KiferW89 conf/jcdkb/Lang88 books/el/Leeuwen90 ... journals/jcss/Leivant89 ... journals/iandc/Leivant90 ... conf/db-workshops/Levesque82 journals/ai/Levesque84 conf/mfdbs/Libkin91 conf/er/Lien79 journals/jacm/Lien82 books/mk/minker88/Lifschitz88 ... journals/tcs/Lindell91 journals/tods/Lipski79 journals/jacm/Lipski81 journals/tcs/LeratL86 journals/cj/LeveneL90 books/sp/Lloyd87 conf/pods/LakshmananM89 conf/tlca/LeivantM93 conf/sigmod/LaverMG83 conf/pods/LiptonN90 journals/jcss/LucchesiO78 conf/sigmod/Lohman88 ... conf/ijcai/Lozinskii85 books/ph/LewisP81 ... conf/sigmod/LecluseRV88 journals/is/LipeckS87 journals/jlp/LloydST87 journals/tods/LingTK81 conf/sigmod/LyngbaekV87 conf/dood/LefebvreV89 conf/pods/LibkinW93 conf/dbpl/LibkinW93 journals/jacm/Maier80 books/cs/Maier83 ... 
conf/vldb/Makinouchi77 conf/icalp/Makowsky81 ... conf/icdt/Malvestuto86 conf/aaai/MacGregorB92 journals/tods/MylopoulosBW80 conf/sigmod/McCarthyD89 journals/csur/MishraE92 conf/sigmod/MumickFPR90 books/mk/Minker88 journals/jlp/Minker88 conf/vldb/MillerIR93 journals/is/MillerIR94 journals/iandc/Mitchell83 conf/pods/Mitchell83 conf/vldb/MendelzonM79 journals/tods/MaierMS79 journals/jcss/MaierMSU80 conf/pods/MendelzonMW94 journals/debu/MorrisNSUG87 journals/ai/Moore85 conf/vldb/Morgenstern83 conf/pods/Morris88 ... conf/pods/MannilaR85 ... journals/jlp/MinkerR90 books/aw/MannilaR92 journals/acr/MaierRW86 ... journals/tods/MarkowitzS92 conf/pods/Marchetti-SpaccamelaPS87 journals/jacm/MaierSY81 conf/iclp/MorrisUG86 journals/tods/MaierUV84 conf/iclp/MorrisUG86 journals/acta/MakowskyV86 books/bc/MaierW88 books/mk/minker88/ManchandraW88 conf/pods/Naughton86 conf/sigmod/NgFS91 ... conf/vldb/Nejdl87 conf/adbt/NicolasM77 conf/sigmod/Nicolas78 journals/acta/Nicolas82 conf/ds/76 conf/pods/NaqviK88 journals/tods/NegriPS91 conf/vldb/NaughtonRSU89 conf/pods/NaughtonS87 ... ... conf/vldb/Osborn79 ... journals/tods/OzsoyogluY87 conf/adbt/Paige82 ... books/cs/Papadimitriou86 ... journals/ipl/Paredaens78 ... books/sp/ParedaensBGG89 journals/ai/Andersen91 books/el/leeuwen90/Perrin90 journals/ins/Petrov89 conf/pods/ParedaensG88 conf/pods/PatnaikI94 conf/adbt/ParedaensJ79 journals/csur/PeckhamM88 ... ... conf/sigmod/ParkerP80 ... conf/iclp/Przymusinski88 conf/pods/Przymusinski89 ... conf/vldb/ParkerSV92 conf/aaai/PearlV87 journals/ai/PereiraW80a conf/pods/PapadimitriouY92 journals/tkde/QianW91 ... journals/jlp/Ramakrishnan91 conf/pods/RamakrishnanBS87 ... conf/adbt/Reiter77 journals/ai/Reiter80 conf/db-workshops/Reiter82 journals/jacm/Reiter86 journals/tods/Rissanen77 conf/mfcs/Rissanen78 conf/pods/Rissanen82 ... journals/ngc/RohmerLK86 journals/jacm/Robinson65 ... conf/pods/Ross89 ... ... conf/sigmod/RoweS79 conf/sigmod/RichardsonS91 journals/debu/RamamohanaraoSBPNTZD87 conf/vldb/RamakrishnanSS92 conf/sigmod/RamakrishnanSSS93 conf/pods/RamakrishnanSUV89 journals/jcss/RamakrishnanSUV93 journals/jlp/RamakrishnanU95 conf/sigmod/SelingerACLP79 conf/sigmod/Sagiv81 journals/tods/Sagiv83 books/mk/minker88/Sagiv88 conf/slp/Sagiv90 conf/sigmod/Sciore81 journals/jacm/Sciore82 conf/pods/Sciore83 journals/acr/Sciore86 journals/jacm/SagivDPF81 conf/pods/X89 ... journals/ai/SmithG85 books/mk/minker88/Shepherdson88 journals/tods/Shipman81 conf/pods/Shmueli87 conf/iclp/SekiI88 conf/sigmod/ShmueliI84 journals/tc/Sickel76 journals/jsc/Siekmann89 conf/sigmod/StonebrakerJGP90 conf/vldb/SimonKM92 journals/csur/ShethL90 conf/pods/SeibL91 conf/sigmod/SuLRD93 conf/adbt/SilvaM79 journals/sigmod/Snodgrass90 journals/sigmod/Soo91 conf/pods/SuciuP94 conf/sigmod/StonebrakerR86 conf/slp/SudarshanR93 conf/pods/SagivS86 journals/cacm/Stonebraker81 books/mk/Stonebraker88 journals/tkde/Stonebraker92 books/aw/Stroustrup91 journals/jacm/SadriU82 conf/vldb/Su91 conf/pods/SagivV89 journals/jacm/SagivW82 journals/tods/StonebrakerWKH76 journals/jacm/SagivY80 conf/pods/SaccaZ86 journals/tcs/SaccaZ88 ... conf/pods/SaccaZ90 ... ... books/bc/TanselCGJSS93 ... journals/acr/ThomasF86 ... ... ... ... journals/tcs/Topor87 ... books/mk/minker88/ToporS88 ... journals/siamcomp/TarjanY84 journals/csur/TeoreyYF86 journals/algorithmica/UllmanG88 conf/pods/Ullman82 books/cs/Ullman82 journals/tods/Ullman85 books/cs/Ullman88 conf/pods/Ullman89 books/cs/Ullman89 conf/sigmod/Gelder86 ... 
conf/pods/BusscheG92 conf/focs/BusscheGAG92 conf/pods/BusscheP91 conf/slp/Gelder86 conf/pods/Gelder89 conf/pods/GelderRS88 journals/jacm/GelderRS91 journals/tods/GelderT91 journals/ipl/Vardi81 conf/stoc/Vardi82 conf/focs/Vardi82 journals/acta/Vardi83 journals/jcss/Vardi84 conf/pods/Vardi85 conf/pods/Vardi86 journals/jcss/Vardi86 ... conf/pods/Vardi88 conf/sigmod/Vassiliou79 ... ... journals/jacm/EmdenK76 conf/nf2/SchollABBGPRV87 journals/jacm/Vianu87 journals/acta/Vianu87 conf/eds/Vieille86 conf/iclp/Vieille87 ... conf/eds/Vieille88 journals/tcs/Vieille89 ... journals/tcs/VianuV92 conf/sigmod/WidomF90 conf/icde/WangH92 conf/pos/WidjojoHW90 journals/computer/Wiederhold92 conf/pods/Wilkins86 conf/pods/Winslett88 conf/sigmod/WolfsonO90 conf/pods/Wong93 conf/sigmod/WolfsonS88 journals/ibmrd/WangW75 journals/tods/WongY76 conf/vldb/Yannakakis81 journals/csur/YuC84 ... journals/jcss/YannakakisP82 ... journals/tods/Zaniolo82 journals/jcss/Zaniolo84 ... conf/edbt/ZhouH90 journals/ibmsj/Zloof77 books/mk/ZdonikM90 db/books/dbtext/abiteboul95.html" }
+{ "id": 72, "dblpid": "books/aw/Lamport86", "title": "LaTeX  User's Guide & Reference Manual", "authors": "Leslie Lamport", "misc": "2002-01-03 Addison-Wesley 1986 0-201-15790-X" }
+{ "id": 73, "dblpid": "books/aw/AhoHU74", "title": "The Design and Analysis of Computer Algorithms.", "authors": "Alfred V. Aho John E. Hopcroft Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1974 0-201-00029-6" }
+{ "id": 74, "dblpid": "books/aw/Lamport2002", "title": "Specifying Systems, The TLA+ Language and Tools for Hardware and Software Engineers", "authors": "Leslie Lamport", "misc": "2005-07-28 Addison-Wesley 2002 0-3211-4306-X http //research.microsoft.com/users/lamport/tla/book.html" }
+{ "id": 75, "dblpid": "books/aw/AhoHU83", "title": "Data Structures and Algorithms.", "authors": "Alfred V. Aho John E. Hopcroft Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1983 0-201-00023-7" }
+{ "id": 76, "dblpid": "books/aw/LewisBK01", "title": "Databases and Transaction Processing  An Application-Oriented Approach", "authors": "Philip M. Lewis Arthur J. Bernstein Michael Kifer", "misc": "2002-01-03 Addison-Wesley 2001 0-201-70872-8" }
+{ "id": 77, "dblpid": "books/aw/AhoKW88", "title": "The AWK Programming Language", "authors": "Alfred V. Aho Brian W. Kernighan Peter J. Weinberger", "misc": "2002-01-03 Addison-Wesley 1988" }
+{ "id": 78, "dblpid": "books/aw/LindholmY97", "title": "The Java Virtual Machine Specification", "authors": "Tim Lindholm Frank Yellin", "misc": "2002-01-28 Addison-Wesley 1997 0-201-63452-X" }
+{ "id": 79, "dblpid": "books/aw/AhoSU86", "title": "Compilers  Princiles, Techniques, and Tools.", "authors": "Alfred V. Aho Ravi Sethi Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1986 0-201-10088-6" }
+{ "id": 80, "dblpid": "books/aw/Sedgewick83", "title": "Algorithms", "authors": "Robert Sedgewick", "misc": "2002-01-03 Addison-Wesley 1983 0-201-06672-6" }
+{ "id": 81, "dblpid": "journals/siamcomp/AspnesW96", "title": "Randomized Consensus in Expected O(n log² n) Operations Per Processor.", "authors": "James Aspnes Orli Waarts", "misc": "2002-01-03 1024-1044 1996 25 SIAM J. Comput. 5 db/journals/siamcomp/siamcomp25.html#AspnesW96" }
+{ "id": 82, "dblpid": "conf/focs/AspnesW92", "title": "Randomized Consensus in Expected O(n log ^2 n) Operations Per Processor", "authors": "James Aspnes Orli Waarts", "misc": "2006-04-25 137-146 conf/focs/FOCS33 1992 FOCS db/conf/focs/focs92.html#AspnesW92" }
+{ "id": 83, "dblpid": "journals/siamcomp/Bloniarz83", "title": "A Shortest-Path Algorithm with Expected Time O(n² log n log* n).", "authors": "Peter A. Bloniarz", "misc": "2002-01-03 588-600 1983 12 SIAM J. Comput. 3 db/journals/siamcomp/siamcomp12.html#Bloniarz83" }
+{ "id": 84, "dblpid": "conf/stoc/Bloniarz80", "title": "A Shortest-Path Algorithm with Expected Time O(n^2 log n log ^* n)", "authors": "Peter A. Bloniarz", "misc": "2006-04-25 378-384 conf/stoc/STOC12 1980 STOC db/conf/stoc/stoc80.html#Bloniarz80" }
+{ "id": 85, "dblpid": "journals/siamcomp/Megiddo83a", "title": "Linear-Time Algorithms for Linear Programming in R³ and Related Problems.", "authors": "Nimrod Megiddo", "misc": "2002-01-03 759-776 1983 12 SIAM J. Comput. 4 db/journals/siamcomp/siamcomp12.html#Megiddo83a" }
+{ "id": 86, "dblpid": "conf/focs/Megiddo82", "title": "Linear-Time Algorithms for Linear Programming in R^3 and Related Problems", "authors": "Nimrod Megiddo", "misc": "2006-04-25 329-338 conf/focs/FOCS23 1982 FOCS db/conf/focs/focs82.html#Megiddo82" }
+{ "id": 87, "dblpid": "journals/siamcomp/MoffatT87", "title": "An All Pairs Shortest Path Algorithm with Expected Time O(n² log n).", "authors": "Alistair Moffat Tadao Takaoka", "misc": "2002-01-03 1023-1031 1987 16 SIAM J. Comput. 6 db/journals/siamcomp/siamcomp16.html#MoffatT87" }
+{ "id": 88, "dblpid": "conf/focs/MoffatT85", "title": "An All Pairs Shortest Path Algorithm with Expected Running Time O(n^2 log n)", "authors": "Alistair Moffat Tadao Takaoka", "misc": "2006-04-25 101-105 conf/focs/FOCS26 1985 FOCS db/conf/focs/focs85.html#MoffatT85" }
+{ "id": 89, "dblpid": "conf/icip/SchonfeldL98", "title": "VORTEX  Video Retrieval and Tracking from Compressed Multimedia Databases.", "authors": "Dan Schonfeld Dan Lelescu", "misc": "2002-11-05 123-127 1998 ICIP (3) db/conf/icip/icip1998-3.html#SchonfeldL98" }
+{ "id": 90, "dblpid": "conf/hicss/SchonfeldL99", "title": "VORTEX  Video Retrieval and Tracking from Compressed Multimedia Databases ¾ Visual Search Engine.", "authors": "Dan Schonfeld Dan Lelescu", "misc": "2002-01-03 1999 HICSS http //computer.org/proceedings/hicss/0001/00013/00013006abs.htm db/conf/hicss/hicss1999-3.html#SchonfeldL99" }
+{ "id": 91, "dblpid": "journals/corr/abs-0802-2861", "title": "Geometric Set Cover and Hitting Sets for Polytopes in $R^3$", "authors": "Sören Laue", "misc": "2008-03-03 http //arxiv.org/abs/0802.2861 2008 CoRR abs/0802.2861 db/journals/corr/corr0802.html#abs-0802-2861 informal publication" }
+{ "id": 92, "dblpid": "conf/stacs/Laue08", "title": "Geometric Set Cover and Hitting Sets for Polytopes in R³.", "authors": "Sören Laue", "misc": "2008-03-04 2008 STACS 479-490 http //drops.dagstuhl.de/opus/volltexte/2008/1367 conf/stacs/2008 db/conf/stacs/stacs2008.html#Laue08" }
+{ "id": 93, "dblpid": "journals/iandc/IbarraJCR91", "title": "Some Classes of Languages in NC¹", "authors": "Oscar H. Ibarra Tao Jiang Jik H. Chang Bala Ravikumar", "misc": "2006-04-25 86-106 Inf. Comput. January 1991 90 1 db/journals/iandc/iandc90.html#IbarraJCR91" }
+{ "id": 94, "dblpid": "conf/awoc/IbarraJRC88", "title": "On Some Languages in NC.", "authors": "Oscar H. Ibarra Tao Jiang Bala Ravikumar Jik H. Chang", "misc": "2002-08-06 64-73 1988 conf/awoc/1988 AWOC db/conf/awoc/awoc88.html#IbarraJRC88" }
+{ "id": 95, "dblpid": "journals/jacm/GalilHLSW87", "title": "An O(n³log n) deterministic and an O(n³) Las Vegs isomorphism test for trivalent graphs.", "authors": "Zvi Galil Christoph M. Hoffmann Eugene M. Luks Claus-Peter Schnorr Andreas Weber", "misc": "2003-11-20 513-531 1987 34 J. ACM 3 http //doi.acm.org/10.1145/28869.28870 db/journals/jacm/jacm34.html#GalilHLSW87" }
+{ "id": 96, "dblpid": "conf/focs/GalilHLSW82", "title": "An O(n^3 log n) Deterministic and an O(n^3) Probabilistic Isomorphism Test for Trivalent Graphs", "authors": "Zvi Galil Christoph M. Hoffmann Eugene M. Luks Claus-Peter Schnorr Andreas Weber", "misc": "2006-04-25 118-125 conf/focs/FOCS23 1982 FOCS db/conf/focs/focs82.html#GalilHLSW82" }
+{ "id": 97, "dblpid": "journals/jacm/GalilT88", "title": "An O(n²(m + n log n)log n) min-cost flow algorithm.", "authors": "Zvi Galil Éva Tardos", "misc": "2003-11-20 374-386 1988 35 J. ACM 2 http //doi.acm.org/10.1145/42282.214090 db/journals/jacm/jacm35.html#GalilT88" }
+{ "id": 98, "dblpid": "conf/focs/GalilT86", "title": "An O(n^2 (m + n log n) log n) Min-Cost Flow Algorithm", "authors": "Zvi Galil Éva Tardos", "misc": "2006-04-25 1-9 conf/focs/FOCS27 1986 FOCS db/conf/focs/focs86.html#GalilT86" }
+{ "id": 99, "dblpid": "series/synthesis/2009Weintraub", "title": "Jordan Canonical Form  Theory and Practice", "authors": "Steven H. Weintraub", "misc": "2009-09-06 Jordan Canonical Form  Theory and Practice http //dx.doi.org/10.2200/S00218ED1V01Y200908MAS006 http //dx.doi.org/10.2200/S00218ED1V01Y200908MAS006 2009 Synthesis Lectures on Mathematics & Statistics Morgan & Claypool Publishers" }
+{ "id": 100, "dblpid": "series/synthesis/2009Brozos", "title": "The Geometry of Walker Manifolds", "authors": "Miguel Brozos-Vázquez Eduardo García-Río Peter Gilkey Stana Nikcevic Rámon Vázquez-Lorenzo", "misc": "2009-09-06 The Geometry of Walker Manifolds http //dx.doi.org/10.2200/S00197ED1V01Y200906MAS005 http //dx.doi.org/10.2200/S00197ED1V01Y200906MAS005 2009 Synthesis Lectures on Mathematics & Statistics Morgan & Claypool Publishers" }
diff --git a/asterix-app/src/test/resources/runtimets/results/hints/issue_251_dataset_hint_6.adm b/asterix-app/src/test/resources/runtimets/results/hints/issue_251_dataset_hint_6.adm
new file mode 100644
index 0000000..d7ae022
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/hints/issue_251_dataset_hint_6.adm
@@ -0,0 +1,5 @@
+{ "word": "am", "count": 1 }
+{ "word": "grover", "count": 1 }
+{ "word": "hi", "count": 1 }
+{ "word": "i", "count": 1 }
+{ "word": "raman", "count": 1 }
diff --git a/asterix-app/src/test/resources/runtimets/results/hints/issue_251_dataset_hint_7.adm b/asterix-app/src/test/resources/runtimets/results/hints/issue_251_dataset_hint_7.adm
new file mode 100644
index 0000000..9720960
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/hints/issue_251_dataset_hint_7.adm
@@ -0,0 +1,12 @@
+{ "id": "nc1:1", "username": "BronsonMike", "location": "", "text": "@GottaLaff @reutersus Christie and obama just foul weather friends", "timestamp": "Thu Dec 06 16:53:06 PST 2012" }
+{ "id": "nc1:100", "username": "KidrauhlProuds", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson  uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:102", "username": "jaysauce82", "location": "", "text": "Not voting for President Obama #BadDecision", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:104", "username": "princeofsupras", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson e uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:15 PST 2012" }
+{ "id": "nc1:106", "username": "GulfDogs", "location": "", "text": "Obama Admin Knew Libyan Terrorists Had US-Provided Weaponsteaparty #tcot #ccot #NewGuards #BreitbartArmy #patriotwttp://t.co/vJxzrQUE", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:108", "username": "Laugzpz", "location": "", "text": "@AlfredoJalife Maestro Obama se hace de la vista gorda, es un acuerdo de siempre creo yo.", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:11", "username": "magarika", "location": "", "text": "RT @ken24xavier: Obama tells SOROS - our plan is ALMOST finished http://t.co/WvzK0GtU", "timestamp": "Thu Dec 06 16:53:05 PST 2012" }
+{ "id": "nc1:111", "username": "ToucanMall", "location": "", "text": "RT @WorldWar3Watch: Michelle Obama Gets More Grammy Nominations Than Justin ...  #Obama #WW3 http://t.co/0Wv2GKij", "timestamp": "Thu Dec 06 16:53:13 PST 2012" }
+{ "id": "nc1:113", "username": "ToucanMall", "location": "", "text": "RT @ObamaPalooza: Tiffany Shared What $2,000 Meant to Her ... and the President Stopped by to Talk About It http://t.co/sgT7lsNV #Obama", "timestamp": "Thu Dec 06 16:53:12 PST 2012" }
+{ "id": "nc1:115", "username": "thewildpitch", "location": "", "text": "RT @RevkahJC: Dennis Miller: Obama Should Just Say He Wants To Tax Successful People http://t.co/Ihlemy9Y", "timestamp": "Thu Dec 06 16:53:11 PST 2012" }
+{ "id": "nc1:117", "username": "Rnugent24", "location": "", "text": "RT @ConservativeQuo: unemployment is above 8% again. I wonder how long it will take for Obama to start blaming Bush? 3-2-1 #tcot #antiobama", "timestamp": "Thu Dec 06 16:53:10 PST 2012" }
+{ "id": "nc1:119", "username": "ToucanMall", "location": "", "text": "RT @Newitrsdotcom: I hope #Obama will win re-election... Other four years without meaningless #wars", "timestamp": "Thu Dec 06 16:53:09 PST 2012" }
diff --git a/asterix-app/src/test/resources/runtimets/results/temp/accessors.adm b/asterix-app/src/test/resources/runtimets/results/temp/accessors.adm
new file mode 100644
index 0000000..4f36f91
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temp/accessors.adm
@@ -0,0 +1 @@
+{ "year1": 2010, "year2": 1987, "year3": -1987, "year4": 928, "year5": 1937, "year6": -3, "year7": 9, "month1": 10, "month2": 11, "month3": 11, "month4": 3, "month5": 12, "month6": 1, "day1": 30, "day2": 19, "day3": 19, "day4": 29, "day5": 29, "day6": 634, "hour1": 23, "hour2": 20, "hour3": 5, "hour4": 14, "min1": 49, "min2": 3, "min3": 23, "min4": 28, "second1": 23, "second2": 6, "second3": 34, "second4": 48, "ms1": 938, "ms2": 280, "ms3": 930, "ms4": 94 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temp/adjust_timezone.adm b/asterix-app/src/test/resources/runtimets/results/temp/adjust_timezone.adm
new file mode 100644
index 0000000..1f80fd9
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temp/adjust_timezone.adm
@@ -0,0 +1 @@
+{ "string1": "04:15:10.327+08:00", "string2": "2010-10-22T18:57:13.329-06:15" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temp/calendar_duration.adm b/asterix-app/src/test/resources/runtimets/results/temp/calendar_duration.adm
new file mode 100644
index 0000000..eb7d565
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temp/calendar_duration.adm
@@ -0,0 +1 @@
+{ "cduration1": duration("P20Y3M13DT7H48M21.329S"), "cduration2": duration("-P9M6DT4H45M39.328S"), "cduration3": duration("P8Y6M"), "cduration4": duration("-P21Y7M10DT13H9M42.983S"), "cduration5": duration("P20Y3M12DT7H48M21.329S"), "cduration6": duration("-P9M5DT4H45M39.328S"), "cduration7": duration("P8Y6M"), "cduration8": duration("-P21Y7M10DT13H9M42.983S") }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temp/date_functions.adm b/asterix-app/src/test/resources/runtimets/results/temp/date_functions.adm
new file mode 100644
index 0000000..2276f85
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temp/date_functions.adm
@@ -0,0 +1 @@
+{ "date1": date("2012-09-17"), "date2": date("1327-12-02"), "date3": date("2012-10-10"), "date4": date("2010-05-17"), "date5": date("1703-08-09"), "duration1": duration("P137216D"), "duration2": duration("-P854D") }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temp/datetime_functions.adm b/asterix-app/src/test/resources/runtimets/results/temp/datetime_functions.adm
new file mode 100644
index 0000000..6ebc562
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temp/datetime_functions.adm
@@ -0,0 +1 @@
+{ "datetime1": datetime("1970-01-12T01:33:27.429Z"), "datetime2": datetime("1327-12-02T23:35:49.938Z"), "datetime3": datetime("1327-12-02T23:35:49.938Z"), "duration1": duration("-P234526DT1H57M37.491S") }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temp/insert_from_delimited_ds.adm b/asterix-app/src/test/resources/runtimets/results/temp/insert_from_delimited_ds.adm
new file mode 100644
index 0000000..5c40e46
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temp/insert_from_delimited_ds.adm
@@ -0,0 +1,4 @@
+{ "date": date("-2012-12-12"), "time": time("23:49:12.390Z"), "datetime": datetime("3827-12-12T11:43:29.329Z"), "duration": duration("P20Y19DT4H14M23.34S") }
+{ "date": date("1993-12-12"), "time": time("03:32:00.000Z"), "datetime": datetime("-2012-12-12T05:00:23.071Z"), "duration": duration("P20Y19D") }
+{ "date": date("1839-03-12"), "time": time("12:30:49.382Z"), "datetime": datetime("1012-06-12T00:37:00.000Z"), "duration": duration("PT4H14M23.34S") }
+{ "date": date("0003-11-02"), "time": time("23:19:32.382Z"), "datetime": datetime("2012-12-12T00:00:00.001Z"), "duration": duration("P20Y12DT12H9.34S") }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temp/insert_from_ext_ds.adm b/asterix-app/src/test/resources/runtimets/results/temp/insert_from_ext_ds.adm
new file mode 100644
index 0000000..afe2ccc
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temp/insert_from_ext_ds.adm
@@ -0,0 +1,3 @@
+{ "date": date("-2012-12-12"), "time": time("23:49:12.390Z"), "datetime": datetime("2012-12-12T00:00:00.001Z"), "duration": duration("P20Y19DT4H14M23.34S"), "interval": interval("datetime("2012-12-12T00:00:00.001Z"), datetime("2013-08-10T22:10:15.398Z")") }
+{ "date": null, "time": null, "datetime": datetime("1920-12-20T23:29:18.478Z"), "duration": null, "interval": null }
+{ "date": null, "time": null, "datetime": null, "duration": null, "interval": null }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temp/interval_functions.adm b/asterix-app/src/test/resources/runtimets/results/temp/interval_functions.adm
new file mode 100644
index 0000000..9f9c9d3
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temp/interval_functions.adm
@@ -0,0 +1 @@
+{ "before1": true, "before2": false, "after1": true, "after2": false, "meet1": true, "meet2": false, "metby1": true, "metby2": false, "overlaps1": true, "overlaps2": false, "overlapped1": true, "overlapped2": false, "overlap1": true, "overlap2": false, "starts1": true, "starts2": false, "startedby1": true, "startedby2": false, "covers1": true, "covers2": false, "coveredby1": true, "coveredby2": false, "ends1": true, "ends2": false, "endedby1": true, "endedby2": false }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temp/time_functions.adm b/asterix-app/src/test/resources/runtimets/results/temp/time_functions.adm
new file mode 100644
index 0000000..56531bd
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temp/time_functions.adm
@@ -0,0 +1 @@
+{ "time1": time("00:26:00.074Z"), "time2": time("23:35:49.938Z"), "time3": time("23:30:23.000Z"), "time4": time("18:26:00.074Z"), "time5": time("00:11:49.938Z"), "duration1": duration("-PT23H24M"), "duration2": duration("PT18H") }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf23.adm b/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf23.adm
index 7fc9d1f..8bd28d4 100644
--- a/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf23.adm
+++ b/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf23.adm
@@ -1,6 +1,6 @@
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName" ], "PrimaryKey": [ "DataverseName", "DatasetName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:59:27 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DataTypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:59:27 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatatypeName" ], "PrimaryKey": [ "DataverseName", "DatatypeName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:59:27 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "DataTypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName" ], "PrimaryKey": [ "DataverseName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:59:27 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Function", "DataTypeName": "FunctionRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name", "Arity" ], "PrimaryKey": [ "DataverseName", "Name", "Arity" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:59:27 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Index", "DataTypeName": "IndexRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName", "IndexName" ], "PrimaryKey": [ "DataverseName", "DatasetName", "IndexName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:59:27 PST 2012" }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName" ], "PrimaryKey": [ "DataverseName", "DatasetName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DataTypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatatypeName" ], "PrimaryKey": [ "DataverseName", "DatatypeName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "DataTypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName" ], "PrimaryKey": [ "DataverseName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Function", "DataTypeName": "FunctionRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name", "Arity" ], "PrimaryKey": [ "DataverseName", "Name", "Arity" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Index", "DataTypeName": "IndexRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName", "IndexName" ], "PrimaryKey": [ "DataverseName", "DatasetName", "IndexName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{  }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterix-app/src/test/resources/runtimets/testsuite.xml
index 9ae7c60..de694cd 100644
--- a/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -754,8 +754,18 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="dml">
-      <compilation-unit name="drop-index">
-        <output-file compare="Text">drop-index.adm</output-file>
+      <compilation-unit name="drop-empty-secondary-indexes">
+        <output-file compare="Text">drop-empty-secondary-indexes.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="dml">
+      <compilation-unit name="create-drop-cltype">
+        <output-file compare="Text">create-drop-cltype.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="dml">
+      <compilation-unit name="create-drop-opntype">
+        <output-file compare="Text">create-drop-opntype.adm</output-file>
       </compilation-unit>
     </test-case>
     <test-case FilePath="dml">
@@ -2616,6 +2626,24 @@
         <output-file compare="Text">open-record-constructor_02.adm</output-file>
       </compilation-unit>
     </test-case>
+    <test-case FilePath="records">
+      <compilation-unit name="closed-closed-fieldname-conflict_issue173">
+        <output-file compare="Text">closed-closed-fieldname-conflict_issue173.adm</output-file>
+        <expected-error>edu.uci.ics.asterix.common.exceptions.AsterixException</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="records">
+      <compilation-unit name="open-closed-fieldname-conflict_issue173">
+        <output-file compare="Text">open-closed-fieldname-conflict_issue173.adm</output-file>
+        <expected-error>edu.uci.ics.asterix.common.exceptions.AsterixException</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="records">
+      <compilation-unit name="open-open-fieldname-conflict_issue173">
+        <output-file compare="Text">open-open-fieldname-conflict_issue173.adm</output-file>
+        <expected-error>edu.uci.ics.asterix.common.exceptions.AsterixException</expected-error>
+      </compilation-unit>
+    </test-case>
   </test-group>
   <test-group name="scan">
     <test-case FilePath="scan">
@@ -3845,6 +3873,23 @@
       </compilation-unit>
     </test-case>
   </test-group>
+  <test-group name="hints">
+    <test-case FilePath="hints">
+      <compilation-unit name="issue_251_dataset_hint_5">
+        <output-file compare="Text">issue_251_dataset_hint_5.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="hints">
+      <compilation-unit name="issue_251_dataset_hint_6">
+        <output-file compare="Text">issue_251_dataset_hint_6.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="hints">
+      <compilation-unit name="issue_251_dataset_hint_7">
+        <output-file compare="Text">issue_251_dataset_hint_7.adm</output-file>
+      </compilation-unit>
+    </test-case>
+  </test-group>
   <test-group name="feeds">
     <test-case FilePath="feeds">
       <compilation-unit name="feeds_01">
diff --git a/asterix-app/src/test/resources/spatial/local/spatial-drop.aql b/asterix-app/src/test/resources/spatial/local/spatial-drop.aql
index 61d4913..fc881ad 100644
--- a/asterix-app/src/test/resources/spatial/local/spatial-drop.aql
+++ b/asterix-app/src/test/resources/spatial/local/spatial-drop.aql
@@ -11,6 +11,6 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset MyData(MyRecord)
-  partitioned by key id on group1;
+  primary key id on group1;
 
 drop dataset MyData;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/spatial/local/spatial-load.aql b/asterix-app/src/test/resources/spatial/local/spatial-load.aql
index 6c135f3..1df33c0 100644
--- a/asterix-app/src/test/resources/spatial/local/spatial-load.aql
+++ b/asterix-app/src/test/resources/spatial/local/spatial-load.aql
@@ -11,7 +11,7 @@
 set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
 
 declare dataset MyData(MyRecord)
-  partitioned by key id on group1;
+  primary key id on group1;
 
 load dataset MyData 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/tpch/queries/asterix/enlist_data.aql b/asterix-app/src/test/resources/tpch/queries/asterix/enlist_data.aql
index 2eb06f5..b0396f9 100644
--- a/asterix-app/src/test/resources/tpch/queries/asterix/enlist_data.aql
+++ b/asterix-app/src/test/resources/tpch/queries/asterix/enlist_data.aql
@@ -89,21 +89,21 @@
   asterix-005, asterix-006, asterix-007, asterix-008, asterix-009, asterix-010;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 
 enlist dataset Customers;
diff --git a/asterix-app/src/test/resources/tpch/queries/asterix/inlined_q18_large_volume_customer.aql b/asterix-app/src/test/resources/tpch/queries/asterix/inlined_q18_large_volume_customer.aql
index 1ac9c1c..bb21d7e 100644
--- a/asterix-app/src/test/resources/tpch/queries/asterix/inlined_q18_large_volume_customer.aql
+++ b/asterix-app/src/test/resources/tpch/queries/asterix/inlined_q18_large_volume_customer.aql
@@ -46,11 +46,11 @@
       asterix-005, asterix-006, asterix-007, asterix-008, asterix-009, asterix-010;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 
 write output to asterix-001:"/tmp/inlined_q18_large_volume_customer.adm";
 
diff --git a/asterix-app/src/test/resources/tpch/queries/asterix/load_data_adm.aql b/asterix-app/src/test/resources/tpch/queries/asterix/load_data_adm.aql
index e4b3a3b..7b872f0 100644
--- a/asterix-app/src/test/resources/tpch/queries/asterix/load_data_adm.aql
+++ b/asterix-app/src/test/resources/tpch/queries/asterix/load_data_adm.aql
@@ -89,21 +89,21 @@
   asterix-005, asterix-006, asterix-007, asterix-008, asterix-009, asterix-010;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 
 load dataset LineItems 
diff --git a/asterix-app/src/test/resources/tpch/queries/asterix/load_data_tbl_100x.aql b/asterix-app/src/test/resources/tpch/queries/asterix/load_data_tbl_100x.aql
index 1d40ba7..c4ccbe0 100644
--- a/asterix-app/src/test/resources/tpch/queries/asterix/load_data_tbl_100x.aql
+++ b/asterix-app/src/test/resources/tpch/queries/asterix/load_data_tbl_100x.aql
@@ -89,21 +89,21 @@
   asterix-005, asterix-006, asterix-007, asterix-008, asterix-009, asterix-010;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 
 load dataset Customers 
diff --git a/asterix-app/src/test/resources/tpch/queries/asterix/load_data_tbl_1x.aql b/asterix-app/src/test/resources/tpch/queries/asterix/load_data_tbl_1x.aql
index bb4a49d..654dcb6 100644
--- a/asterix-app/src/test/resources/tpch/queries/asterix/load_data_tbl_1x.aql
+++ b/asterix-app/src/test/resources/tpch/queries/asterix/load_data_tbl_1x.aql
@@ -89,21 +89,21 @@
   asterix-005, asterix-006, asterix-007, asterix-008, asterix-009, asterix-010;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 
 load dataset Customers 
diff --git a/asterix-app/src/test/resources/tpch/queries/asterix/q1_pricing_summary_report.aql b/asterix-app/src/test/resources/tpch/queries/asterix/q1_pricing_summary_report.aql
index b29e7a6..411fdbb 100644
--- a/asterix-app/src/test/resources/tpch/queries/asterix/q1_pricing_summary_report.aql
+++ b/asterix-app/src/test/resources/tpch/queries/asterix/q1_pricing_summary_report.aql
@@ -23,7 +23,7 @@
       asterix-005, asterix-006, asterix-007, asterix-008, asterix-009, asterix-010;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 
 write output to asterix-001:"/home/onose/hyracks-asterix/results/q1_pricing_summary_report.adm";
  
diff --git a/asterix-app/src/test/resources/tpch/queries/asterix/q3_shipping_priority.aql b/asterix-app/src/test/resources/tpch/queries/asterix/q3_shipping_priority.aql
index 71ed6db..6f52101 100644
--- a/asterix-app/src/test/resources/tpch/queries/asterix/q3_shipping_priority.aql
+++ b/asterix-app/src/test/resources/tpch/queries/asterix/q3_shipping_priority.aql
@@ -46,11 +46,11 @@
       asterix-005, asterix-006, asterix-007, asterix-008, asterix-009, asterix-010;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 
 write output to asterix-001:"/home/onose/hyracks-asterix/results/q3_shipping_priority.adm";
 
diff --git a/asterix-app/src/test/resources/tpch/queries/asterix/q5_local_supplier_volume.aql b/asterix-app/src/test/resources/tpch/queries/asterix/q5_local_supplier_volume.aql
index 8d0d6dd..7544d19 100644
--- a/asterix-app/src/test/resources/tpch/queries/asterix/q5_local_supplier_volume.aql
+++ b/asterix-app/src/test/resources/tpch/queries/asterix/q5_local_supplier_volume.aql
@@ -69,17 +69,17 @@
       asterix-005, asterix-006, asterix-007, asterix-008, asterix-009, asterix-010;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
   
 write output to asterix-001:"/home/onose/hyracks-asterix/results/q5_local_supplier.adm";
 
diff --git a/asterix-app/src/test/resources/tpch/queries/asterix/q9_product_type_profit.aql b/asterix-app/src/test/resources/tpch/queries/asterix/q9_product_type_profit.aql
index 3067fbe..164869a 100644
--- a/asterix-app/src/test/resources/tpch/queries/asterix/q9_product_type_profit.aql
+++ b/asterix-app/src/test/resources/tpch/queries/asterix/q9_product_type_profit.aql
@@ -73,17 +73,17 @@
       asterix-005, asterix-006, asterix-007, asterix-008, asterix-009, asterix-010;
       
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
   
 write output to asterix-001:"/home/onose/hyracks-asterix/results/q9_product_type_profit.adm";
 
diff --git a/asterix-app/src/test/resources/tpch/queries/local/inlined_q18_large_volume_customer.aql b/asterix-app/src/test/resources/tpch/queries/local/inlined_q18_large_volume_customer.aql
index abd06af..280a1d0 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/inlined_q18_large_volume_customer.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/inlined_q18_large_volume_customer.aql
@@ -45,11 +45,11 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 
 write output to nc1:"/tmp/inlined_q18_large_volume_customer.adm";
 
diff --git a/asterix-app/src/test/resources/tpch/queries/local/load_adm_data.aql b/asterix-app/src/test/resources/tpch/queries/local/load_adm_data.aql
index b7e5b3d..d31c421 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/load_adm_data.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/load_adm_data.aql
@@ -88,21 +88,21 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 load dataset LineItems 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_0.001x_2nodes.aql b/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_0.001x_2nodes.aql
index 0d03e5d..47724fa 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_0.001x_2nodes.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_0.001x_2nodes.aql
@@ -88,21 +88,21 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 load dataset LineItems 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_10x_1node.aql b/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_10x_1node.aql
index 7c8fc8c..49e2a9a 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_10x_1node.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_10x_1node.aql
@@ -88,21 +88,21 @@
 declare nodegroup group1 on nc1;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 load dataset LineItems 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_1x_1node.aql b/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_1x_1node.aql
index b6cf6f8..3e6ccc4 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_1x_1node.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_1x_1node.aql
@@ -88,21 +88,21 @@
 declare nodegroup group1 on nc1;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 load dataset LineItems 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_20x_2node.aql b/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_20x_2node.aql
index 606cde1..f44d388 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_20x_2node.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_20x_2node.aql
@@ -88,21 +88,21 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 load dataset LineItems 
 using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/tpch/queries/local/orders-index-create.aql b/asterix-app/src/test/resources/tpch/queries/local/orders-index-create.aql
index e369f10..9c5e361 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/orders-index-create.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/orders-index-create.aql
@@ -17,6 +17,6 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 
 create index idx_Orders_Custkey on Orders(o_custkey);
diff --git a/asterix-app/src/test/resources/tpch/queries/local/orders-index-search.aql b/asterix-app/src/test/resources/tpch/queries/local/orders-index-search.aql
index f4c9fd9..66e88a5 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/orders-index-search.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/orders-index-search.aql
@@ -17,7 +17,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 
 declare index idx_Orders_Custkey on Orders(o_custkey);
 
diff --git a/asterix-app/src/test/resources/tpch/queries/local/q1_pricing_summary_report.aql b/asterix-app/src/test/resources/tpch/queries/local/q1_pricing_summary_report.aql
index a69528c..9ab0322 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/q1_pricing_summary_report.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/q1_pricing_summary_report.aql
@@ -22,7 +22,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 
 // for external datasets, use:
 //
diff --git a/asterix-app/src/test/resources/tpch/queries/local/q3_shipping_priority.aql b/asterix-app/src/test/resources/tpch/queries/local/q3_shipping_priority.aql
index b5c27cc..cadc5c9 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/q3_shipping_priority.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/q3_shipping_priority.aql
@@ -45,11 +45,11 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 
 write output to nc1:"/tmp/q3_shipping_priority.adm";
 
diff --git a/asterix-app/src/test/resources/tpch/queries/local/q5_local_supplier_volume.aql b/asterix-app/src/test/resources/tpch/queries/local/q5_local_supplier_volume.aql
index 9fe2ed6..724d58b 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/q5_local_supplier_volume.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/q5_local_supplier_volume.aql
@@ -68,17 +68,17 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
   
 write output to nc1:"/tmp/q5_local_supplier_volume.adm";
 
diff --git a/asterix-app/src/test/resources/tpch/queries/local/q9_product_type_profit.aql b/asterix-app/src/test/resources/tpch/queries/local/q9_product_type_profit.aql
index c0643b1..5407270 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/q9_product_type_profit.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/q9_product_type_profit.aql
@@ -72,17 +72,17 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
   
 write output to nc1:"/tmp/q9_product_type_profit.adm";
 
diff --git a/asterix-app/src/test/resources/tpch/queries/local/scan_filter_lineitem.aql b/asterix-app/src/test/resources/tpch/queries/local/scan_filter_lineitem.aql
index 3855746..ccddad0 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/scan_filter_lineitem.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/scan_filter_lineitem.aql
@@ -22,7 +22,7 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 
 // load dataset LineItems from nc1:"/opt/tpch/tengigs/lineitem.tbl" delimited by "|";
 
diff --git a/asterix-app/src/test/resources/tpch/queries/local/write-custorder.aql b/asterix-app/src/test/resources/tpch/queries/local/write-custorder.aql
index 9757ff8..b767d57 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/write-custorder.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/write-custorder.aql
@@ -50,13 +50,13 @@
 declare nodegroup group1 on nc1, nc2;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset CustOrder(CustOrderType)
-  partitioned by key co_custkey on group1;
+  primary key co_custkey on group1;
 
 write into dataset CustOrder 
 ( for $c in dataset('Customers')
diff --git a/asterix-app/src/test/resources/tpch/queries/rainbow/load_data_tbl_50x.aql b/asterix-app/src/test/resources/tpch/queries/rainbow/load_data_tbl_50x.aql
index 9635891..7d0295d 100644
--- a/asterix-app/src/test/resources/tpch/queries/rainbow/load_data_tbl_50x.aql
+++ b/asterix-app/src/test/resources/tpch/queries/rainbow/load_data_tbl_50x.aql
@@ -89,21 +89,21 @@
 rainbow-04, rainbow-05;
 
 declare dataset LineItems(LineItemType)
-  partitioned by key l_orderkey, l_linenumber on group1;
+  primary key l_orderkey, l_linenumber on group1;
 declare dataset Orders(OrderType)
-  partitioned by key o_orderkey on group1;
+  primary key o_orderkey on group1;
 declare dataset Customers(CustomerType) 
-  partitioned by key c_custkey on group1;
+  primary key c_custkey on group1;
 declare dataset Suppliers(SupplierType)
-  partitioned by key s_suppkey on group1;
+  primary key s_suppkey on group1;
 declare dataset Nations(NationType) 
-  partitioned by key n_nationkey on group1;
+  primary key n_nationkey on group1;
 declare dataset Regions(RegionType)
-  partitioned by key r_regionkey on group1;
+  primary key r_regionkey on group1;
 declare dataset Parts(PartType)
-  partitioned by key p_partkey on group1;
+  primary key p_partkey on group1;
 declare dataset PartSupp(PartSuppType)
-  partitioned by key ps_partkey, ps_suppkey on group1;  
+  primary key ps_partkey, ps_suppkey on group1;  
 
 
 load dataset Customers from 
diff --git a/asterix-aql/pom.xml b/asterix-aql/pom.xml
index 7ac2cbb..7617582 100644
--- a/asterix-aql/pom.xml
+++ b/asterix-aql/pom.xml
@@ -1,4 +1,5 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 	<modelVersion>4.0.0</modelVersion>
 	<parent>
 		<artifactId>asterix</artifactId>
@@ -13,8 +14,8 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
 				</configuration>
 			</plugin>
 			<plugin>
@@ -36,14 +37,15 @@
 						<goals>
 							<goal>jjdoc</goal>
 						</goals>
-                        <phase>process-sources</phase>
+						<phase>process-sources</phase>
 					</execution>
 				</executions>
 			</plugin>
 		</plugins>
 		<pluginManagement>
 			<plugins>
-				<!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
+				<!--This plugin's configuration is used to store Eclipse m2e settings 
+					only. It has no influence on the Maven build itself. -->
 				<plugin>
 					<groupId>org.eclipse.m2e</groupId>
 					<artifactId>lifecycle-mapping</artifactId>
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DatasetDecl.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DatasetDecl.java
index 463a256..443524e 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DatasetDecl.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DatasetDecl.java
@@ -14,6 +14,8 @@
  */
 package edu.uci.ics.asterix.aql.expression;
 
+import java.util.Map;
+
 import edu.uci.ics.asterix.aql.base.Statement;
 import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
 import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
@@ -26,17 +28,18 @@
     protected final Identifier itemTypeName;
     protected final DatasetType datasetType;
     protected final IDatasetDetailsDecl datasetDetailsDecl;
+    protected final Map<String, String> hints;
+    protected final boolean ifNotExists;
 
-    public boolean ifNotExists;
-
-    public DatasetDecl(Identifier dataverse, Identifier name, Identifier itemTypeName, DatasetType datasetType,
-            IDatasetDetailsDecl idd, boolean ifNotExists) {
+    public DatasetDecl(Identifier dataverse, Identifier name, Identifier itemTypeName, Map<String, String> hints,
+            DatasetType datasetType, IDatasetDetailsDecl idd, boolean ifNotExists) {
         this.dataverse = dataverse;
         this.name = name;
         this.itemTypeName = itemTypeName;
+        this.hints = hints;
         this.ifNotExists = ifNotExists;
         this.datasetType = datasetType;
-        datasetDetailsDecl = idd;
+        this.datasetDetailsDecl = idd;
     }
 
     public boolean getIfNotExists() {
@@ -55,6 +58,10 @@
         return itemTypeName;
     }
 
+    public Map<String, String> getHints() {
+        return hints;
+    }
+
     @Override
     public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
         return visitor.visitDatasetDecl(this, arg);
diff --git a/asterix-aql/src/main/javacc/AQL.jj b/asterix-aql/src/main/javacc/AQL.jj
index 8672fd1..e296266 100644
--- a/asterix-aql/src/main/javacc/AQL.jj
+++ b/asterix-aql/src/main/javacc/AQL.jj
@@ -17,6 +17,7 @@
 
 import java.util.Map;
 import java.util.HashMap;
+import java.util.LinkedHashMap;
 import edu.uci.ics.asterix.aql.literal.FloatLiteral;
 import edu.uci.ics.asterix.aql.literal.DoubleLiteral;
 import edu.uci.ics.asterix.aql.literal.FalseLiteral;
@@ -623,7 +624,7 @@
   Identifier dataverseName = null;
   Identifier datasetName = null;
   boolean alreadySorted = false;
-  String adapterClassname;
+  String adapterName;
   Map<String,String> properties;
   Pair<Identifier,Identifier> nameComponents = null;
 }
@@ -636,12 +637,11 @@
    }
    
    "using"
-
-    <STRING_LITERAL>
+  
     {
-      adapterClassname = removeQuotesAndEscapes(token.image);
+    	adapterName = getAdapterName();
     }
-
+   
     {
       properties = getConfiguration();
     }
@@ -652,11 +652,31 @@
         
   ";"
   {
-     return new LoadFromFileStatement(dataverseName, datasetName, adapterClassname, properties, alreadySorted);
+     return new LoadFromFileStatement(dataverseName, datasetName, adapterName, properties, alreadySorted);
   }   
 }
 
 
+String getAdapterName() throws ParseException :
+{
+	String adapterName = null;
+}
+{
+    ( 
+      <IDENTIFIER> {
+     	adapterName = (new Identifier(token.image)).getValue(); 
+      }
+      | 
+      <STRING_LITERAL>
+      {
+        adapterName = removeQuotesAndEscapes(token.image);
+      }
+    )
+    {
+	return adapterName;
+	}
+}
+
 
 DatasetDecl DatasetDeclaration(DatasetType datasetType) throws ParseException :
 {
@@ -669,7 +689,8 @@
   String nameComponentSecond = null;
   boolean ifNotExists = false;
   IDatasetDetailsDecl datasetDetails = null;
-  Pair<Identifier,Identifier> nameComponents = null;	
+  Pair<Identifier,Identifier> nameComponents = null;
+  Map<String,String> hints = new HashMap<String,String>();	
 }
 {
   {
@@ -701,41 +722,44 @@
       else if(datasetType == DatasetType.FEED) {
       	datasetDetails = FeedDatasetDeclaration();
       }
-      dd = new DatasetDecl(dataverseName, datasetName, itemTypeName, datasetType, datasetDetails,ifNotExists);
-      
   }
+  
+  (
+  "hints"
   {
-    return dd;
+      initProperties(hints);
+  }
+  )?
+   ";"
+ 
+  {
+   dd = new DatasetDecl(dataverseName, datasetName, itemTypeName, hints, datasetType, datasetDetails,ifNotExists);
+   return dd;
   }
 }
 
 InternalDetailsDecl InternalDatasetDeclaration() throws ParseException :
 {
     InternalDetailsDecl idd = null;
-    List<String> partitioningExprs = new ArrayList<String>();
+    List<String> primaryKeyFields = new ArrayList<String>();
     Identifier nodeGroupName=null;
 }
 {
-  "partitioned" "by" "key"
-  < IDENTIFIER >
-  {
-         partitioningExprs.add(token.image); 
-  }
   (
-    "," < IDENTIFIER >
     {
-         partitioningExprs.add(token.image); 
+  	  primaryKeyFields = getPrimaryKeyFields();
     }
-  )*
+  )
+  
   (
   "on" < IDENTIFIER >
     {
         nodeGroupName = new Identifier(token.image);
     }
   )?
-  ";"
+  
   {
-    idd = new InternalDetailsDecl(nodeGroupName, partitioningExprs);
+    idd = new InternalDetailsDecl(nodeGroupName, primaryKeyFields);
     return idd;
   }
 }
@@ -743,7 +767,7 @@
 ExternalDetailsDecl ExternalDatasetDeclaration() throws ParseException :
 {
   ExternalDetailsDecl edd = null;
-  String adapterClassname = null;
+  String adapterName = null;
   Map < String, String > properties;
 }
 {
@@ -752,10 +776,8 @@
   }
  
     "using"
-    
-     <STRING_LITERAL>
     {
-      adapterClassname = removeQuotesAndEscapes(token.image);
+    	adapterName = getAdapterName();
     }
 
     {
@@ -764,10 +786,9 @@
 
     {
     	  edd = new ExternalDetailsDecl();
-		  edd.setAdapter(adapterClassname);
+		  edd.setAdapter(adapterName);
    		  edd.setProperties(properties);
     } 
-  ";"
  
   {
     return edd;
@@ -777,10 +798,10 @@
 FeedDetailsDecl FeedDatasetDeclaration() throws ParseException :
 {
     FeedDetailsDecl fdd = null;
-    String adapterFactoryClassname = null;
+    String adapterName = null;
     Map < String, String > properties;
 	Pair<Identifier,Identifier> nameComponents;
-	List<String> partitioningExprs = new ArrayList<String>();
+	List<String> primaryKeyFields = new ArrayList<String>();
     Identifier nodeGroupName=null;
     FunctionSignature appliedFunction=null;
 	String dataverse;
@@ -789,10 +810,8 @@
 }
 {
    "using"
-   
-    <STRING_LITERAL>
     {
-      adapterFactoryClassname = removeQuotesAndEscapes(token.image);
+    	adapterName = getAdapterName();
     }
 
     {
@@ -816,30 +835,52 @@
   }   
   )?
   
-  "partitioned" "by" "key"
-  < IDENTIFIER >
-  {
-    	 partitioningExprs.add(token.image);
-  }
   (
-    "," < IDENTIFIER >
     {
-    	 partitioningExprs.add(token.image);
+  	  primaryKeyFields  = getPrimaryKeyFields();
     }
-  )*
+  )
+  
   (
   "on" < IDENTIFIER >
   {
     	nodeGroupName = new Identifier(token.image);
   }
   )?
-  ";"
+  
   {
-    fdd = new FeedDetailsDecl(adapterFactoryClassname, properties, appliedFunction, nodeGroupName, partitioningExprs);
+    fdd = new FeedDetailsDecl(adapterName, properties, appliedFunction, nodeGroupName, primaryKeyFields);
     return fdd;
   }
 }
 
+List<String> getPrimaryKeyFields()  throws ParseException :
+{
+	List<String> primaryKeyFields = new ArrayList<String>();
+}
+{
+
+  "primary" "key"
+  < IDENTIFIER >
+  {
+    	 primaryKeyFields.add(token.image);
+  }
+  (
+    "," < IDENTIFIER >
+    {
+    	 primaryKeyFields.add(token.image);
+    }
+  )*
+  {
+   	return primaryKeyFields;
+  }
+  
+}
+
+
+
+
+
 ControlFeedStatement ControlFeedDeclaration(ControlFeedStatement.OperationType operationType) throws ParseException :
 {
   Pair<Identifier,Identifier> nameComponents = null;
@@ -874,7 +915,7 @@
 
 Map<String,String> getConfiguration()  throws ParseException :
 {
-	Map<String,String> configuration = new HashMap<String,String>();
+	Map<String,String> configuration = new LinkedHashMap<String,String>();
 	String key;
 	String value;
 }
@@ -923,6 +964,75 @@
      }
 }
 
+void initProperties(Map<String,String> properties)  throws ParseException :
+{
+	String key;
+	String value;
+}
+{
+    (
+      <LEFTPAREN>
+        (
+          <IDENTIFIER>
+          {
+            key = (new Identifier(token.image)).getValue();
+          }
+          "=" 
+          (
+            (<STRING_LITERAL>
+             {
+              value = removeQuotesAndEscapes(token.image);
+             }
+            ) |
+            (<INTEGER_LITERAL>
+             {
+             try{
+              value = "" + Long.valueOf(token.image);
+              } catch (NumberFormatException nfe){
+                  throw new ParseException("inappropriate value: " + token.image); 
+              }
+             } 
+            )
+          )
+        {
+          properties.put(key.toUpperCase(), value);
+        }
+       ( 
+        "," 
+        (
+          <IDENTIFIER>
+          {
+            key = (new Identifier(token.image)).getValue();
+          }
+          "=" 
+          (
+           (<STRING_LITERAL>
+            {
+              value = removeQuotesAndEscapes(token.image);
+            }
+           ) |
+           (<INTEGER_LITERAL>
+            {
+              try{
+                value = "" + Long.valueOf(token.image);
+              } catch (NumberFormatException nfe){
+              	throw new ParseException("inappropriate value: " + token.image); 
+              }
+            } 
+           )
+          ) 
+        )
+        {
+          properties.put(key.toUpperCase(), value);
+        }
+        
+       )*
+      )
+       <RIGHTPAREN>
+    )?
+}
+
+
 
 NodegroupDecl NodegroupDeclaration() throws ParseException :
 {
diff --git a/asterix-common/pom.xml b/asterix-common/pom.xml
index ee81e75..0994109 100644
--- a/asterix-common/pom.xml
+++ b/asterix-common/pom.xml
@@ -1,4 +1,5 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 	<modelVersion>4.0.0</modelVersion>
 	<parent>
 		<artifactId>asterix</artifactId>
@@ -15,8 +16,8 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
 				</configuration>
 			</plugin>
 		</plugins>
diff --git a/asterix-external-data/pom.xml b/asterix-external-data/pom.xml
index 36e7a71..337da7f 100644
--- a/asterix-external-data/pom.xml
+++ b/asterix-external-data/pom.xml
@@ -1,4 +1,5 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 	<modelVersion>4.0.0</modelVersion>
 	<parent>
 		<artifactId>asterix</artifactId>
@@ -6,6 +7,7 @@
 		<version>0.0.4-SNAPSHOT</version>
 	</parent>
 	<artifactId>asterix-external-data</artifactId>
+
 	<build>
 		<plugins>
 			<plugin>
@@ -13,8 +15,8 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
 				</configuration>
 			</plugin>
 			<plugin>
@@ -49,14 +51,12 @@
 				<artifactId>maven-surefire-plugin</artifactId>
 				<version>2.7.2</version>
 				<configuration>
-					<!--
-						doesn't work from m2eclipse, currently
-						<additionalClasspathElements>
-						<additionalClasspathElement>${basedir}/src/main/resources</additionalClasspathElement>
-						</additionalClasspathElements>
-					-->
+					<!-- doesn't work from m2eclipse, currently <additionalClasspathElements> 
+						<additionalClasspathElement>${basedir}/src/main/resources</additionalClasspathElement> 
+						</additionalClasspathElements> -->
 					<forkMode>pertest</forkMode>
-					<argLine>-enableassertions -Xmx${test.heap.size}m -Dfile.encoding=UTF-8
+					<argLine>-enableassertions -Xmx${test.heap.size}m
+						-Dfile.encoding=UTF-8
 						-Djava.util.logging.config.file=src/test/resources/logging.properties</argLine>
 					<includes>
 						<include>**/*TestSuite.java</include>
@@ -101,39 +101,39 @@
 			<version>1.3.1-201002241208</version>
 			<scope>test</scope>
 		</dependency>
-                <dependency>
-                        <groupId>org.twitter4j</groupId>
-                        <artifactId>twitter4j-core</artifactId>
-                        <version>2.2.3</version>
-                </dependency>
-                <dependency>
-                        <groupId>org.apache.hadoop</groupId>
-                        <artifactId>hadoop-core</artifactId>
-                        <version>0.20.2</version>
-                        <type>jar</type>
-                        <scope>compile</scope>
-                </dependency>
-                <dependency>
-                        <groupId>net.java.dev.rome</groupId>
-                        <artifactId>rome-fetcher</artifactId>
-                        <version>1.0.0</version>
-                        <type>jar</type>
-                        <scope>compile</scope>
-                </dependency>
-                <dependency>
-                         <groupId>rome</groupId>
-                         <artifactId>rome</artifactId>
-                         <version>1.0.1-modified-01</version>
-                </dependency>
-	        <dependency>
-		         <groupId>edu.uci.ics.hyracks</groupId>
-		         <artifactId>hyracks-dataflow-hadoop</artifactId>
-	        </dependency>
-<dependency>
-            <groupId>jdom</groupId>
-            <artifactId>jdom</artifactId>
-            <version>1.0</version>
-        </dependency>
+		<dependency>
+			<groupId>org.twitter4j</groupId>
+			<artifactId>twitter4j-core</artifactId>
+			<version>2.2.3</version>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-core</artifactId>
+			<version>0.20.2</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>net.java.dev.rome</groupId>
+			<artifactId>rome-fetcher</artifactId>
+			<version>1.0.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>rome</groupId>
+			<artifactId>rome</artifactId>
+			<version>1.0.1-modified-01</version>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-dataflow-hadoop</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>jdom</groupId>
+			<artifactId>jdom</artifactId>
+			<version>1.0</version>
+		</dependency>
 	</dependencies>
 
 </project>
diff --git a/asterix-hyracks-glue/pom.xml b/asterix-hyracks-glue/pom.xml
index 5f991a4..1a18d1c 100644
--- a/asterix-hyracks-glue/pom.xml
+++ b/asterix-hyracks-glue/pom.xml
@@ -1,51 +1,52 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <parent>
-    <artifactId>asterix</artifactId>
-    <groupId>edu.uci.ics.asterix</groupId>
-    <version>0.0.4-SNAPSHOT</version>
-  </parent>
-  <groupId>edu.uci.ics.asterix</groupId>
-  <artifactId>asterix-hyracks-glue</artifactId>
-  <version>0.0.4-SNAPSHOT</version>
-  <build>
-       <plugins>
-           <plugin>
-              <groupId>org.apache.maven.plugins</groupId>
-              <artifactId>maven-compiler-plugin</artifactId>
-              <version>2.0.2</version>
-              <configuration>
-                 <source>1.6</source>
-                 <target>1.6</target>
-              </configuration>
-           </plugin>
-       </plugins>
-  </build>     
-  <name>asterix-hyracks-glue</name>
-  <url>http://maven.apache.org</url>
-  <dependencies>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <version>3.8.1</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-       <groupId>edu.uci.ics.asterix</groupId>
-       <artifactId>asterix-transactions</artifactId>
-       <version>0.0.4-SNAPSHOT</version>
-       <scope>compile</scope>
-    </dependency>
-    <dependency>
-    	<groupId>edu.uci.ics.asterix</groupId>
-    	<artifactId>asterix-common</artifactId>
-    	<version>0.0.4-SNAPSHOT</version>
-    	<type>jar</type>
-    	<scope>compile</scope>
-    </dependency>
-  </dependencies>
-  <properties>
-    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-  </properties>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<artifactId>asterix</artifactId>
+		<groupId>edu.uci.ics.asterix</groupId>
+		<version>0.0.4-SNAPSHOT</version>
+	</parent>
+	<groupId>edu.uci.ics.asterix</groupId>
+	<artifactId>asterix-hyracks-glue</artifactId>
+	<version>0.0.4-SNAPSHOT</version>
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+	<name>asterix-hyracks-glue</name>
+	<url>http://maven.apache.org</url>
+	<dependencies>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>3.8.1</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.asterix</groupId>
+			<artifactId>asterix-transactions</artifactId>
+			<version>0.0.4-SNAPSHOT</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.asterix</groupId>
+			<artifactId>asterix-common</artifactId>
+			<version>0.0.4-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+	</dependencies>
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+	</properties>
 </project>
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/Asterix_ADM_Parser.md b/asterix-maven-plugins/lexer-generator-maven-plugin/Asterix_ADM_Parser.md
new file mode 100644
index 0000000..eeaffc9
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/Asterix_ADM_Parser.md
@@ -0,0 +1,53 @@
+The Asterix ADM Parser
+======================
+
+The ADM parser inside Asterix is composed of two different components:
+
+* **The Parser** AdmTupleParser, which converts the ADM tokens into internal objects
+* **The Lexer**  AdmLexer, which scans the ADM file and returns a list of ADM tokens
+
+These two classes belong to the package:
+
+    edu.uci.ics.asterix.runtime.operators.file
+
+The Parser is loaded through a factory (*AdmSchemafullRecordParserFactory*) by
+
+    edu.uci.ics.asterix.external.dataset.adapter.FileSystemBasedAdapter extends AbstractDatasourceAdapter
+
+
+How to add a new datatype
+-------------------------
+The ADM format supports two different kinds of datatypes:
+
+* primitive
+* with constructor
+
+A primitive datatype lets you write the actual value of the field without extra markup:
+
+    { name : "Diego", age : 23 }
+
+while datatypes with a constructor require you to specify the type of the value first and then a string with the serialized value:
+
+    { center : point3d("P2.1,3,8.5") }
+
+In order to add a new datatype, the steps are:
+
+1.  Add the new token to the **Lexer** (see the sketch after this list)
+  * **if the datatype is primitive** it is necessary to create a TOKEN able to recognize **the format of the value**
+  * **if the datatype has a constructor** it is necessary to create **only** a TOKEN able to recognize **the name of the constructor**
+
+2.  Change the **Parser** so that it correctly converts the new token into internal objects
+  * This will require **adding new cases to the switch-case statements** and introducing **a serializer/deserializer object** for that datatype.
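+
+For example, for the `point3d` constructor type shown above, the lexer side could be covered by a single token that only recognizes the constructor name. This is a hypothetical sketch in the grammar format of the lexer generator (the token name and rule below are assumptions for illustration, not the definitions actually used by Asterix):
+
+    # constructor datatype: a token for the constructor name is enough;
+    # the serialized value itself is scanned like a string literal
+    POINT3D_CONSTRUCTOR = string(point3d)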
+
+
+The Lexer
+----------
+To add a new datatype or change the token definitions you have to change ONLY the file adm.grammar located in 
+	asterix-runtime/src/main/resources/adm.grammar
+The lexer will be regenerated from that definition file during each Maven build.
+
+The Maven configuration is located in asterix-runtime/pom.xml
+
+
+> Author: Diego Giorgini - diegogiorgini@gmail.com   
+> 6 December 2012
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/README.md b/asterix-maven-plugins/lexer-generator-maven-plugin/README.md
new file mode 100644
index 0000000..b3632e6
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/README.md
@@ -0,0 +1,111 @@
+Lexer Generator
+===============
+
+This tool automates the creation of hand-coded-like lexers.   
+It was created to address the performance issues of other (more advanced) lexer generators like JavaCC that arise when you need to scan terabytes of data. In particular, it is *~20x faster* than JavaCC and can typically parse data from a normal hard disk at *more than 70 MB/s*.
+
+
+Maven Plugin (to put inside pom.xml)
+-------------------------------------
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>2.0.2</version>
+                <configuration>
+                    <source>1.6</source>
+                    <target>1.6</target>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>edu.uci.ics.asterix</groupId>
+                <artifactId>lexer-generator-maven-plugin</artifactId>
+                <version>0.1-SNAPSHOT</version>
+                <configuration>
+                    <grammarFile>src/main/java/edu/uci/ics/asterix/runtime/operators/file/adm/adm.grammar</grammarFile>
+                    <outputDir>${project.build.directory}/generated-sources</outputDir>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>generate-lexer</id>
+                        <phase>generate-sources</phase>
+                        <goals>
+                            <goal>generate-lexer</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+
+
+Command line
+-------------
+    LexerGenerator
+    usage: java LexerGenerator <configuration file>
+
+
+
+What hand-coded-like means and why it is so fast
+------------------------------------------------
+Most lexers use a finite state machine encoded in a data structure called a [State Transition Table](http://en.wikipedia.org/wiki/State_transition_table).   
+While elegant and practical, this approach requires some extra checks and operations to deal with the data structure at runtime. A different approach consists of encoding the state machine as actual code; this way the operations performed are limited to the minimum needed to parse our grammar.   
+A common problem with this kind of hand-coded lexer is that it is almost impossible to maintain and change, which is the reason for this lexer generator: it produces a hand-coded-like lexer starting from a grammar specification.
+
+Another big difference from most lexer generators (especially the ones for Java) is that, since this one is optimized for performance, we **don't return objects** and we **use as few objects as possible internally**.    
+This is actually the main reason for the ~20x speedup compared with JavaCC.
+
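+A minimal, purely illustrative Java sketch of the difference (the class and method names below are hypothetical and not part of the generated lexer's API): instead of looking up `table[state][charClass]`, each state becomes a small piece of code that inspects the current character directly, so recognizing e.g. an integer literal reduces to a couple of branches and a loop.
+
+    // Hypothetical sketch: a state machine encoded as code.
+    // Recognizes an optional sign followed by a digit sequence.
+    public class CodeEncodedStateSketch {
+        static final int TOKEN_INT_LITERAL = 2;
+        static final int TOKEN_NOT_FOUND = -1;
+
+        static int parseIntLiteral(String s) {
+            int i = 0;
+            if (i < s.length() && (s.charAt(i) == '+' || s.charAt(i) == '-')) {
+                i++;                              // "signOrNothing()" state
+            }
+            int start = i;
+            while (i < s.length() && Character.isDigit(s.charAt(i))) {
+                i++;                              // "digitSequence()" state
+            }
+            return i > start ? TOKEN_INT_LITERAL : TOKEN_NOT_FOUND;
+        }
+
+        public static void main(String[] args) {
+            System.out.println(parseIntLiteral("-42"));   // prints 2
+            System.out.println(parseIntLiteral("abc"));   // prints -1
+        }
+    }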
+
+Configuration File
+------------------
+It is a simple *key: value* configuration file plus the *specification of your grammar*.   
+The configuration keys are listed below:
+
+    # LEXER GENERATOR configuration file
+    # ---------------------------------------
+    # Place *first* the generic configuration
+    # then list your grammar.
+
+    PACKAGE:          edu.uci.ics.asterix.admfast.parser
+    LEXER_NAME:       AdmLexer
+    OUTPUT_DIR:       output/
+
+
+Specify The Grammar
+-------------------
+Your grammar has to be listed in the configuration file after the *TOKENS:* keyword.
+
+    TOKENS:
+
+    BOOLEAN_LIT        = string(boolean)
+    COMMA              = char(\,)
+    COLON              = char(:)
+    STRING_LITERAL     = char("), anythingUntil(")
+    INT_LITERAL        = signOrNothing(), digitSequence()
+    INT8_LITERAL       = token(INT_LITERAL), string(i8)
+    @EXPONENT          = caseInsensitiveChar(e), signOrNothing(), digitSequence()
+    DOUBLE_LITERAL     = signOrNothing(), digitSequence(), char(.), digitSequence(), token(@EXPONENT)
+    DOUBLE_LITERAL     = signOrNothing(), digitSequence(), token(@EXPONENT)
+
+Each token is composed of a **name** and a sequence of **rules**.   
+Each rule is then written in the format: **constructor(parameter)**  
+the list of available rules is coded inside *NodeChainFactory.java*
+
+You can write more than one sequence of rules by just adding another line and repeating the token name.
+
+You can reuse the rules of a token inside another one with the special rule: **token(** *TOKEN_NAME* **)**
+
+Lastly, you can define *auxiliary* token definitions that will not be encoded in the final lexer (but that can be useful inside other token definitions) by just **starting the token name with @**.
+
+**Attention:** take care not to write rules that, once merged into the state machine, would lead to a *conflict between transitions*, such as a transition for a generic digit and one for the digit 0 out of the same node.
+
+The result: MyLexer
+-------------------
+The result of running the LexerGenerator is the creation of the lexer inside the configured output directory (*OUTPUT_DIR*).
+The lexer is extremely simple and minimal and can be used like an iterator:
+
+     MyLexer myLexer = new MyLexer(new FileReader(file));
+     int token;
+     while ((token = myLexer.next()) != MyLexer.TOKEN_EOF) {
+         System.out.println(MyLexer.tokenKindToString(token));
+     }
+
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/pom.xml b/asterix-maven-plugins/lexer-generator-maven-plugin/pom.xml
new file mode 100644
index 0000000..524727f
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/pom.xml
@@ -0,0 +1,36 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.asterix</groupId>
+  <artifactId>lexer-generator-maven-plugin</artifactId>
+  <version>0.1</version>
+  <packaging>maven-plugin</packaging>
+  <name>lexer-generator-maven-plugin</name>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.6</source>
+          <target>1.6</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>4.8.1</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.maven</groupId>
+      <artifactId>maven-plugin-api</artifactId>
+      <version>2.0.2</version>
+    </dependency>
+  </dependencies>    
+</project>
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/LexerGenerator.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/LexerGenerator.java
new file mode 100644
index 0000000..512f3d0
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/LexerGenerator.java
@@ -0,0 +1,202 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.Reader;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.Map.Entry;
+import java.util.Set;
+import org.apache.maven.plugin.logging.Log;
+
+public class LexerGenerator {
+    private LinkedHashMap<String, Token> tokens = new LinkedHashMap<String, Token>();
+    private Log logger;
+
+    public LexerGenerator() {
+    }
+
+    public LexerGenerator(Log logger) {
+        this.logger = logger;
+    }
+
+    private void log(String info) {
+        if (logger == null) {
+            System.out.println(info);
+        } else {
+            logger.info(info);
+        }
+    }
+
+    public void addToken(String rule) throws Exception {
+        Token newToken;
+        if (rule.charAt(0) == '@') {
+            newToken = new TokenAux(rule, tokens);
+        } else {
+            newToken = new Token(rule, tokens);
+        }
+        Token existingToken = tokens.get(newToken.getName());
+        if (existingToken == null) {
+            tokens.put(newToken.getName(), newToken);
+        } else {
+            existingToken.merge(newToken);
+        }
+    }
+
+    public void generateLexer(HashMap<String, String> config) throws Exception {
+        LexerNode main = this.compile();
+        config.put("TOKENS_CONSTANTS", this.tokensConstants());
+        config.put("TOKENS_IMAGES", this.tokensImages());
+        config.put("LEXER_LOGIC", main.toJava());
+        config.put("LEXER_AUXFUNCTIONS", replaceParams(this.auxiliaryFunctions(main), config));
+        String[] files = { "/Lexer.java", "/LexerException.java" };
+        String outputDir = config.get("OUTPUT_DIR");
+        (new File(outputDir)).mkdirs();
+        for (String file : files) {
+            String input = readFile(LexerGenerator.class.getResourceAsStream(file));
+            String fileOut = file.replace("Lexer", config.get("LEXER_NAME"));
+            String output = replaceParams(input, config);
+            log("Generating: " + file + "\t>>\t" + fileOut);
+            FileWriter out = new FileWriter((new File(outputDir, fileOut)).toString());
+            out.write(output);
+            out.close();
+            log(" [done]\n");
+        }
+    }
+
+    public String printParsedGrammar() {
+        StringBuilder result = new StringBuilder();
+        for (Token token : tokens.values()) {
+            result.append(token.toString()).append("\n");
+        }
+        return result.toString();
+    }
+
+    private LexerNode compile() throws Exception {
+        LexerNode main = new LexerNode();
+        for (Token token : tokens.values()) {
+            if (token instanceof TokenAux)
+                continue;
+            main.merge(token.getNode());
+        }
+        return main;
+    }
+
+    private String tokensImages() {
+        StringBuilder result = new StringBuilder();
+        Set<String> uniqueTokens = tokens.keySet();
+        for (String token : uniqueTokens) {
+            result.append(", \"<").append(token).append(">\" ");
+        }
+        return result.toString();
+    }
+
+    private String tokensConstants() {
+        StringBuilder result = new StringBuilder();
+        Set<String> uniqueTokens = tokens.keySet();
+        int i = 2;
+        for (String token : uniqueTokens) {
+            result.append(", TOKEN_").append(token).append("=").append(i).append(" ");
+            i++;
+        }
+        return result.toString();
+    }
+
+    private String auxiliaryFunctions(LexerNode main) {
+        StringBuilder result = new StringBuilder();
+        Set<String> functions = main.neededAuxFunctions();
+        for (String token : functions) {
+            result.append("private int parse_" + token
+                    + "(char currentChar) throws IOException, [LEXER_NAME]Exception{\n");
+            result.append(tokens.get(token).getNode().toJavaAuxFunction());
+            result.append("\n}\n\n");
+        }
+        return result.toString();
+    }
+
+    private static String readFile(Reader input) throws FileNotFoundException, IOException {
+        StringBuffer fileData = new StringBuffer(1000);
+        BufferedReader reader = new BufferedReader(input);
+        char[] buf = new char[1024];
+        int numRead = 0;
+        while ((numRead = reader.read(buf)) != -1) {
+            String readData = String.valueOf(buf, 0, numRead);
+            fileData.append(readData);
+            buf = new char[1024];
+        }
+        reader.close();
+        return fileData.toString();
+    }
+
+    private static String readFile(InputStream input) throws FileNotFoundException, IOException {
+        if (input == null) {
+            throw new FileNotFoundException();
+        }
+        return readFile(new InputStreamReader(input));
+    }
+
+    private static String readFile(String fileName) throws FileNotFoundException, IOException {
+        return readFile(new FileReader(fileName));
+    }
+
+    private static String replaceParams(String input, HashMap<String, String> config) {
+        for (Entry<String, String> param : config.entrySet()) {
+            String key = "\\[" + param.getKey() + "\\]";
+            String value = param.getValue();
+            input = input.replaceAll(key, value);
+        }
+        return input;
+    }
+
+    public static void main(String args[]) throws Exception {
+        if (args.length == 0 || "--help".equals(args[0]) || "-h".equals(args[0])) {
+            System.out.println("LexerGenerator\nusage: java LexerGenerator <configuration file>");
+            return;
+        }
+
+        LexerGenerator lexer = new LexerGenerator();
+        HashMap<String, String> config = new HashMap<String, String>();
+
+        System.out.println("Config file:\t" + args[0]);
+        String input = readFile(args[0]);
+        boolean tokens = false;
+        for (String line : input.split("\r?\n")) {
+            line = line.trim();
+            if (line.length() == 0 || line.charAt(0) == '#')
+                continue;
+            if (tokens == false && !line.equals("TOKENS:")) {
+                config.put(line.split("\\s*:\\s*")[0], line.split("\\s*:\\s*")[1]);
+            } else if (line.equals("TOKENS:")) {
+                tokens = true;
+            } else {
+                lexer.addToken(line);
+            }
+        }
+
+        String parsedGrammar = lexer.printParsedGrammar();
+        lexer.generateLexer(config);
+        System.out.println("\nGenerated grammar:");
+        System.out.println(parsedGrammar);
+    }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/LexerGeneratorMojo.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/LexerGeneratorMojo.java
new file mode 100644
index 0000000..11ee1d5
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/LexerGeneratorMojo.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator;
+
+import edu.uci.ics.asterix.lexergenerator.LexerGenerator;
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.HashMap;
+import org.apache.maven.plugin.AbstractMojo;
+import org.apache.maven.plugin.MojoExecutionException;
+
+import java.io.File;
+
+/**
+ * @goal generate-lexer
+ * @phase generate-sources
+ * @requiresDependencyResolution compile
+ */
+public class LexerGeneratorMojo extends AbstractMojo {
+    /**
+     * parameter injected from pom.xml
+     * 
+     * @parameter
+     * @required
+     */
+    private File grammarFile;
+
+    /**
+     * parameter injected from pom.xml
+     * 
+     * @parameter
+     * @required
+     */
+    private File outputDir;
+
+    public void execute() throws MojoExecutionException {
+        LexerGenerator lexer = new LexerGenerator(getLog());
+        HashMap<String, String> config = new HashMap<String, String>();
+        getLog().info("--- Lexer Generator Maven Plugin - started with grammarFile: " + grammarFile.toString());
+        try {
+            String input = readFile(grammarFile);
+            config.put("OUTPUT_DIR", outputDir.toString());
+            boolean tokens = false;
+            for (String line : input.split("\r?\n")) {
+                line = line.trim();
+                if (line.length() == 0 || line.charAt(0) == '#')
+                    continue;
+                if (tokens == false && !line.equals("TOKENS:")) {
+                    config.put(line.split("\\s*:\\s*")[0], line.split("\\s*:\\s*")[1]);
+                } else if (line.equals("TOKENS:")) {
+                    tokens = true;
+                } else {
+                    lexer.addToken(line);
+                }
+            }
+            lexer.generateLexer(config);
+        } catch (Throwable e) {
+            throw new MojoExecutionException("Error while generating lexer", e);
+        }
+        String parsedGrammar = lexer.printParsedGrammar();
+        getLog().info("--- Generated grammar:\n" + parsedGrammar);
+    }
+
+    private String readFile(File file) throws FileNotFoundException, IOException {
+        StringBuffer fileData = new StringBuffer(1000);
+        BufferedReader reader = new BufferedReader(new FileReader(file));
+        char[] buf = new char[1024];
+        int numRead = 0;
+        while ((numRead = reader.read(buf)) != -1) {
+            String readData = String.valueOf(buf, 0, numRead);
+            fileData.append(readData);
+            buf = new char[1024];
+        }
+        reader.close();
+        return fileData.toString();
+    }
+
+}
\ No newline at end of file
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/LexerNode.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/LexerNode.java
new file mode 100644
index 0000000..7b8d059
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/LexerNode.java
@@ -0,0 +1,243 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator;
+
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Set;
+
+import edu.uci.ics.asterix.lexergenerator.rules.*;
+
+public class LexerNode {
+    private static String TOKEN_PREFIX = "TOKEN_";
+    private LinkedHashMap<Rule, LexerNode> actions = new LinkedHashMap<Rule, LexerNode>();
+    private String finalTokenName;
+    private Set<String> ongoingParsing = new HashSet<String>();
+
+    public LexerNode clone() {
+        LexerNode node = new LexerNode();
+        node.finalTokenName = this.finalTokenName;
+        for (Map.Entry<Rule, LexerNode> entry : this.actions.entrySet()) {
+            node.actions.put(entry.getKey().clone(), entry.getValue().clone());
+        }
+        for (String ongoing : this.ongoingParsing) {
+            node.ongoingParsing.add(ongoing);
+        }
+        return node;
+    }
+
+    public void add(Rule newRule) {
+        if (actions.get(newRule) == null) {
+            actions.put(newRule, new LexerNode());
+        }
+    }
+
+    public void append(Rule newRule) {
+        if (actions.size() == 0) {
+            add(newRule);
+        } else {
+            for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
+                action.getValue().append(newRule);
+            }
+            if (actions.containsKey(new RuleEpsilon())) {
+                actions.remove(new RuleEpsilon());
+                add(newRule);
+            }
+        }
+    }
+
+    public void merge(LexerNode newNode) throws Exception {
+        for (Map.Entry<Rule, LexerNode> action : newNode.actions.entrySet()) {
+            if (this.actions.get(action.getKey()) == null) {
+                this.actions.put(action.getKey(), action.getValue());
+            } else {
+                this.actions.get(action.getKey()).merge(action.getValue());
+            }
+        }
+        if (newNode.finalTokenName != null) {
+            if (this.finalTokenName == null) {
+                this.finalTokenName = newNode.finalTokenName;
+            } else {
+                throw new Exception("Rule conflict between: " + this.finalTokenName + " and " + newNode.finalTokenName);
+            }
+        }
+        for (String ongoing : newNode.ongoingParsing) {
+            this.ongoingParsing.add(ongoing);
+        }
+    }
+
+    public void append(LexerNode node) throws Exception {
+        for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
+            if (action.getKey() instanceof RuleEpsilon)
+                continue;
+            action.getValue().append(node);
+        }
+        if (actions.containsKey(new RuleEpsilon())) {
+            actions.remove(new RuleEpsilon());
+            merge(node.clone());
+        }
+        if (actions.size() == 0 || finalTokenName != null) {
+            finalTokenName = null;
+            merge(node.clone());
+        }
+    }
+
+    public void appendTokenName(String name) {
+        if (actions.size() == 0) {
+            this.finalTokenName = name;
+        } else {
+            ongoingParsing.add(TOKEN_PREFIX + name);
+            for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
+                action.getValue().appendTokenName(name);
+            }
+        }
+    }
+
+    public LexerNode removeTokensName() {
+        this.finalTokenName = null;
+        this.ongoingParsing.clear();
+        for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
+            action.getValue().removeTokensName();
+        }
+        return this;
+    }
+
+    public String toString() {
+        StringBuilder result = new StringBuilder();
+        if (finalTokenName != null)
+            result.append("! ");
+        if (actions.size() == 1)
+            result.append(actions.keySet().toArray()[0].toString() + actions.values().toArray()[0].toString());
+        if (actions.size() > 1) {
+            result.append(" ( ");
+            for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
+                if (result.length() != 3) {
+                    result.append(" || ");
+                }
+                result.append(action.getKey().toString());
+                result.append(action.getValue().toString());
+            }
+            result.append(" ) ");
+        }
+        return result.toString();
+    }
+
+    public String toJava() {
+        StringBuffer result = new StringBuffer();
+        if (numberOfRuleChar() > 2) {
+            result.append(toJavaSingleCharRules());
+            result.append(toJavaComplexRules(false));
+        } else {
+            result.append(toJavaComplexRules(true));
+        }
+        if (this.finalTokenName != null) {
+            result.append("return " + TOKEN_PREFIX + finalTokenName + ";\n");
+        } else if (ongoingParsing != null) {
+            String ongoingParsingArgs = collectionJoin(ongoingParsing, ',');
+            result.append("return parseError(" + ongoingParsingArgs + ");\n");
+        }
+        return result.toString();
+    }
+
+    private int numberOfRuleChar() {
+        int singleCharRules = 0;
+        for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
+            if (action.getKey() instanceof RuleChar)
+                singleCharRules++;
+        }
+        return singleCharRules;
+    }
+
+    private String toJavaSingleCharRules() {
+        StringBuffer result = new StringBuffer();
+        result.append("switch(currentChar){\n");
+        for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
+            if (action.getKey() instanceof RuleChar) {
+                RuleChar rule = (RuleChar) action.getKey();
+                result.append("case '" + rule.expectedChar() + "':\n");
+                result.append(rule.javaAction()).append("\n");
+                result.append(action.getValue().toJava());
+            }
+        }
+        result.append("}\n");
+        return result.toString();
+    }
+
+    private String toJavaComplexRules(boolean all) {
+        StringBuffer result = new StringBuffer();
+        for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
+            if (!all && action.getKey() instanceof RuleChar)
+                continue;
+            if (action.getKey() instanceof RuleEpsilon)
+                continue;
+            String act = action.getKey().javaAction();
+            if (act.length() > 0) {
+                act = "\n" + act;
+            }
+            result.append(action.getKey().javaMatch(act + "\n" + action.getValue().toJava()));
+        }
+        return result.toString();
+    }
+
+    public void expandFirstAction(LinkedHashMap<String, Token> tokens) throws Exception {
+        // Iterate over a snapshot of the rules: expanding a RulePartial removes it from
+        // the action map and merges in the referenced token's node, which would
+        // invalidate a live iterator over actions.entrySet().
+        for (Rule first : new HashSet<Rule>(actions.keySet())) {
+            if (first instanceof RulePartial) {
+                if (tokens.get(((RulePartial) first).getPartial()) == null) {
+                    throw new Exception("Cannot find a token used as part of another definition, missing token: "
+                            + ((RulePartial) first).getPartial());
+                }
+                actions.remove(first);
+                LexerNode node = tokens.get(((RulePartial) first).getPartial()).getNode().clone();
+                merge(node);
+            }
+        }
+    }
+
+    public Set<String> neededAuxFunctions() {
+        HashSet<String> partials = new HashSet<String>();
+        for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
+            Rule rule = action.getKey();
+            if (rule instanceof RulePartial) {
+                partials.add(((RulePartial) rule).getPartial());
+            }
+            partials.addAll(action.getValue().neededAuxFunctions());
+        }
+        return partials;
+    }
+
+    public String toJavaAuxFunction() {
+        String oldFinalTokenName = finalTokenName;
+        if (oldFinalTokenName == null)
+            finalTokenName = "AUX_NOT_FOUND";
+        String result = toJava();
+        finalTokenName = oldFinalTokenName;
+        return result;
+    }
+
+    private String collectionJoin(Collection<String> collection, char c) {
+        StringBuilder ongoingParsingArgs = new StringBuilder();
+        for (String token : collection) {
+            ongoingParsingArgs.append(token);
+            ongoingParsingArgs.append(c);
+        }
+        if (collection.size() > 0) {
+            ongoingParsingArgs.deleteCharAt(ongoingParsingArgs.length() - 1);
+        }
+        return ongoingParsingArgs.toString();
+    }
+}
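
LexerNode, completed above, is the tree the generator walks when emitting Java: append() chains a rule after the current leaves, add() opens an alternative branch, appendTokenName() marks the accepting leaves, and toJava() prints the matching code (a switch over currentChar when more than two single-character rules compete at a node, nested if blocks otherwise, with a parseError(...) fallback). A minimal sketch of that flow, using only the API shown in this patch; the token name IF_LIT is made up for illustration:

    import edu.uci.ics.asterix.lexergenerator.LexerNode;
    import edu.uci.ics.asterix.lexergenerator.rules.RuleChar;

    public class LexerNodeSketch {
        public static void main(String[] args) throws Exception {
            LexerNode node = new LexerNode();
            node.append(new RuleChar('i'));  // match 'i' ...
            node.append(new RuleChar('f'));  // ... then 'f'
            node.appendTokenName("IF_LIT");  // accept as TOKEN_IF_LIT
            // Emits, roughly: if (currentChar=='i'){ currentChar = readNextChar();
            //                   if (currentChar=='f'){ ... return TOKEN_IF_LIT; } }
            // followed by a parseError(...) fallback.
            System.out.println(node.toJava());
        }
    }
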
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/NodeChainFactory.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/NodeChainFactory.java
new file mode 100644
index 0000000..941f822
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/NodeChainFactory.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator;
+
+import java.util.HashMap;
+
+import edu.uci.ics.asterix.lexergenerator.rulegenerators.*;
+
+public class NodeChainFactory {
+    static private HashMap<String, RuleGenerator> ruleGenerators = new HashMap<String, RuleGenerator>();
+
+    static {
+        ruleGenerators.put("char", new RuleGeneratorChar());
+        ruleGenerators.put("string", new RuleGeneratorString());
+        ruleGenerators.put("anythingUntil", new RuleGeneratorAnythingUntil());
+        ruleGenerators.put("signOrNothing", new RuleGeneratorSignOrNothing());
+        ruleGenerators.put("sign", new RuleGeneratorSign());
+        ruleGenerators.put("digitSequence", new RuleGeneratorDigitSequence());
+        ruleGenerators.put("caseInsensitiveChar", new RuleGeneratorCaseInsensitiveChar());
+        ruleGenerators.put("charOrNothing", new RuleGeneratorCharOrNothing());
+        ruleGenerators.put("token", new RuleGeneratorToken());
+        ruleGenerators.put("nothing", new RuleGeneratorNothing());
+    }
+
+    public static LexerNode create(String generator, String constructor) throws Exception {
+        constructor = constructor.replace("@", "aux_");
+        if (ruleGenerators.get(generator) == null)
+            throw new Exception("Rule Generator not found for '" + generator + "'");
+        return ruleGenerators.get(generator).generate(constructor);
+    }
+}
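
NodeChainFactory maps a generator name from the grammar file to the RuleGenerator that builds the matching LexerNode chain, after rewriting '@' in the constructor argument to the aux_ prefix used for auxiliary tokens. A hedged usage sketch (the inputs are illustrative, not taken from the patch):

    import edu.uci.ics.asterix.lexergenerator.LexerNode;
    import edu.uci.ics.asterix.lexergenerator.NodeChainFactory;

    public class NodeChainFactorySketch {
        public static void main(String[] args) throws Exception {
            // "string(boolean)" in a grammar file: a chain of RuleChar nodes b-o-o-l-e-a-n.
            LexerNode keyword = NodeChainFactory.create("string", "boolean");
            // "signOrNothing()": '+', '-' or nothing, as alternative branches.
            LexerNode sign = NodeChainFactory.create("signOrNothing", "");
            System.out.println(keyword + " / " + sign);
            NodeChainFactory.create("doesNotExist", "x"); // throws: Rule Generator not found
        }
    }
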
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/Token.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/Token.java
new file mode 100644
index 0000000..bb122c2
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/Token.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator;
+
+import java.util.LinkedHashMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class Token {
+    private String userDescription;
+    private String name;
+    private LexerNode node;
+
+    public Token(String str, LinkedHashMap<String, Token> tokens) throws Exception {
+        userDescription = str;
+        node = new LexerNode();
+        parse(userDescription, tokens);
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public LexerNode getNode() {
+        return node;
+    }
+
+    public String toString() {
+        return this.name + " => " + getNode().toString();
+    }
+
+    public void merge(Token newToken) throws Exception {
+        node.merge(newToken.getNode());
+    }
+
+    private void parse(String str, LinkedHashMap<String, Token> tokens) throws Exception {
+        Pattern p = Pattern.compile("^(@?\\w+)\\s*=\\s*(.+)");
+        Matcher m = p.matcher(str);
+        if (!m.find())
+            throw new Exception("Token definition not correct: " + str);
+        this.name = m.group(1).replaceAll("@", "aux_");
+        String[] textRules = m.group(2).split("(?<!\\\\),\\s*");
+        for (String textRule : textRules) {
+            Pattern pRule = Pattern.compile("^(\\w+)(\\((.*)\\))?");
+            Matcher mRule = pRule.matcher(textRule);
+            if (!mRule.find())
+                throw new Exception("Rule definition not correct: " + textRule + " (in token: " + str + ")");
+            String generator = mRule.group(1);
+            String constructor = mRule.group(3);
+            if (constructor == null)
+                throw new Exception("Error in rule format, missing constructor for generator '" + generator
+                        + "' in token definition: " + str);
+            constructor = constructor.replace("\\", "");
+            node.append(NodeChainFactory.create(generator, constructor));
+            node.expandFirstAction(tokens);
+        }
+        node.appendTokenName(name);
+    }
+
+}
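
Token parses one line of the TOKENS section of the configuration. The expected shape is NAME = generator(argument), generator(argument), ...; a leading '@' marks an auxiliary definition and is rewritten to the aux_ prefix, and a comma inside an argument can be kept literal by escaping it with a backslash. A minimal sketch, assuming the HELLO_LIT definition from the sample default.config later in this diff:

    import java.util.LinkedHashMap;
    import edu.uci.ics.asterix.lexergenerator.Token;

    public class TokenSketch {
        public static void main(String[] args) throws Exception {
            LinkedHashMap<String, Token> known = new LinkedHashMap<String, Token>();
            Token hello = new Token("HELLO_LIT = string(hello)", known);
            known.put(hello.getName(), hello);   // later definitions may reference it via token(HELLO_LIT)
            System.out.println(hello);           // roughly: HELLO_LIT => hello!
        }
    }
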
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/TokenAux.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/TokenAux.java
new file mode 100644
index 0000000..a9c7ffc
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/TokenAux.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator;
+
+import java.util.LinkedHashMap;
+
+public class TokenAux extends Token {
+
+    public TokenAux(String str, LinkedHashMap<String, Token> tokens) throws Exception {
+        super(str, tokens);
+    }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGenerator.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGenerator.java
new file mode 100644
index 0000000..3733746
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGenerator.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+
+public interface RuleGenerator {
+    public LexerNode generate(String input) throws Exception;
+}
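
RuleGenerator is the extension point behind NodeChainFactory: each generator turns the textual argument of a rule into a LexerNode fragment. As a hypothetical illustration of how a new generator would plug in (this class is not part of the patch), a rule anyOf(+-) accepting any one of the listed characters could look like:

    package edu.uci.ics.asterix.lexergenerator.rulegenerators;

    import edu.uci.ics.asterix.lexergenerator.LexerNode;
    import edu.uci.ics.asterix.lexergenerator.rules.RuleChar;

    // Hypothetical example, not part of this patch.
    public class RuleGeneratorAnyOf implements RuleGenerator {

        @Override
        public LexerNode generate(String input) throws Exception {
            if (input == null || input.length() == 0)
                throw new Exception("Wrong rule format for generator anyOf: " + input);
            LexerNode result = new LexerNode();
            for (int i = 0; i < input.length(); i++) {
                result.add(new RuleChar(input.charAt(i))); // each character is an alternative branch
            }
            return result;
        }
    }

It would then be registered in NodeChainFactory's static block under the name used in the grammar file.
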
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorAnythingUntil.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorAnythingUntil.java
new file mode 100644
index 0000000..b14eb3e
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorAnythingUntil.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleAnythingUntil;
+
+public class RuleGeneratorAnythingUntil implements RuleGenerator {
+
+    @Override
+    public LexerNode generate(String input) throws Exception {
+        LexerNode result = new LexerNode();
+        if (input == null || input.length() != 1)
+            throw new Exception("Wrong rule format for generator anythingExcept: " + input);
+        result.append(new RuleAnythingUntil(input.charAt(0)));
+        return result;
+    }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorCaseInsensitiveChar.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorCaseInsensitiveChar.java
new file mode 100644
index 0000000..b789f59
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorCaseInsensitiveChar.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleChar;
+
+public class RuleGeneratorCaseInsensitiveChar implements RuleGenerator {
+
+    @Override
+    public LexerNode generate(String input) throws Exception {
+        LexerNode result = new LexerNode();
+        if (input == null || input.length() != 1)
+            throw new Exception("Wrong rule format for generator char: " + input);
+        char cl = Character.toLowerCase(input.charAt(0));
+        char cu = Character.toUpperCase(cl);
+        result.add(new RuleChar(cl));
+        result.add(new RuleChar(cu));
+        return result;
+    }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorChar.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorChar.java
new file mode 100644
index 0000000..0b830e6
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorChar.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleChar;
+
+public class RuleGeneratorChar implements RuleGenerator {
+
+    @Override
+    public LexerNode generate(String input) throws Exception {
+        LexerNode result = new LexerNode();
+        if (input == null || input.length() != 1)
+            throw new Exception("Wrong rule format for generator char: " + input);
+        result.append(new RuleChar(input.charAt(0)));
+        return result;
+    }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorCharOrNothing.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorCharOrNothing.java
new file mode 100644
index 0000000..d01ff7d
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorCharOrNothing.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleChar;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleEpsilon;
+
+public class RuleGeneratorCharOrNothing implements RuleGenerator {
+
+    @Override
+    public LexerNode generate(String input) throws Exception {
+        LexerNode result = new LexerNode();
+        if (input == null || input.length() != 1)
+            throw new Exception("Wrong rule format for generator charOrNothing: " + input);
+        result.add(new RuleChar(input.charAt(0)));
+        result.add(new RuleEpsilon());
+        return result;
+    }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorDigitSequence.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorDigitSequence.java
new file mode 100644
index 0000000..d067ee7
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorDigitSequence.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleDigitSequence;
+
+public class RuleGeneratorDigitSequence implements RuleGenerator {
+
+    @Override
+    public LexerNode generate(String input) throws Exception {
+        LexerNode result = new LexerNode();
+        result.append(new RuleDigitSequence());
+        return result;
+    }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorNothing.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorNothing.java
new file mode 100644
index 0000000..fec06a1
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorNothing.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleEpsilon;
+
+public class RuleGeneratorNothing implements RuleGenerator {
+
+    @Override
+    public LexerNode generate(String input) throws Exception {
+        LexerNode node = new LexerNode();
+        node.add(new RuleEpsilon());
+        return node;
+    }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorSign.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorSign.java
new file mode 100644
index 0000000..0160f09
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorSign.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleChar;
+
+public class RuleGeneratorSign implements RuleGenerator {
+
+    @Override
+    public LexerNode generate(String input) throws Exception {
+        LexerNode result = new LexerNode();
+        result.add(new RuleChar('+'));
+        result.add(new RuleChar('-'));
+        return result;
+    }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorSignOrNothing.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorSignOrNothing.java
new file mode 100644
index 0000000..7c4297d
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorSignOrNothing.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleChar;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleEpsilon;
+
+public class RuleGeneratorSignOrNothing implements RuleGenerator {
+
+    @Override
+    public LexerNode generate(String input) throws Exception {
+        LexerNode result = new LexerNode();
+        result.add(new RuleChar('+'));
+        result.add(new RuleChar('-'));
+        result.add(new RuleEpsilon());
+        return result;
+    }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorString.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorString.java
new file mode 100644
index 0000000..eb0471b
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorString.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleChar;
+
+public class RuleGeneratorString implements RuleGenerator {
+
+    @Override
+    public LexerNode generate(String input) {
+        LexerNode result = new LexerNode();
+        if (input == null)
+            return result;
+        for (int i = 0; i < input.length(); i++) {
+            result.append(new RuleChar(input.charAt(i)));
+        }
+        return result;
+    }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorToken.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorToken.java
new file mode 100644
index 0000000..b4c23d8
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorToken.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RulePartial;
+
+public class RuleGeneratorToken implements RuleGenerator {
+
+    @Override
+    public LexerNode generate(String input) throws Exception {
+        if (input == null || input.length() == 0)
+            throw new Exception("Wrong rule format for generator token : " + input);
+        LexerNode node = new LexerNode();
+        node.add(new RulePartial(input));
+        return node;
+    }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/Rule.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/Rule.java
new file mode 100644
index 0000000..01cd1d5
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/Rule.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rules;
+
+public interface Rule {
+    public int hashCode();
+
+    public boolean equals(Object o);
+
+    public String toString();
+
+    public String javaAction();
+
+    public String javaMatch(String action);
+
+    public Rule clone();
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleAnythingUntil.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleAnythingUntil.java
new file mode 100644
index 0000000..8d45835
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleAnythingUntil.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rules;
+
+public class RuleAnythingUntil implements Rule {
+
+    private char expected;
+
+    public RuleAnythingUntil clone() {
+        return new RuleAnythingUntil(expected);
+    }
+
+    public RuleAnythingUntil(char expected) {
+        this.expected = expected;
+    }
+
+    @Override
+    public String toString() {
+        return " .* " + String.valueOf(expected);
+    }
+
+    @Override
+    public int hashCode() {
+        return 10 * (int) expected;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o == null)
+            return false;
+        if (o instanceof RuleAnythingUntil) {
+            if (((RuleAnythingUntil) o).expected == this.expected) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    @Override
+    public String javaAction() {
+        return "currentChar = readNextChar();";
+    }
+
+    @Override
+    public String javaMatch(String action) {
+        StringBuilder result = new StringBuilder();
+        result.append("boolean escaped = false;");
+        result.append("while (currentChar!='").append(expected).append("' || escaped)");
+        result.append("{\nif(!escaped && currentChar=='\\\\\\\\'){escaped=true;}\nelse {escaped=false;}\ncurrentChar = readNextChar();\n}");
+        result.append("\nif (currentChar=='").append(expected).append("'){");
+        result.append(action);
+        result.append("}\n");
+        return result.toString();
+    }
+
+}
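
The code emitted by RuleAnythingUntil keeps calling readNextChar() until the expected character appears in unescaped position (a local escaped flag tracks a preceding backslash) and only then runs the continuation passed to javaMatch. A small illustration; TOKEN_STRING_LITERAL is a made-up constant used only to show the shape of the emitted code:

    import edu.uci.ics.asterix.lexergenerator.rules.RuleAnythingUntil;

    public class RuleAnythingUntilSketch {
        public static void main(String[] args) {
            // Consume everything up to an unescaped closing double quote, then accept.
            String emitted = new RuleAnythingUntil('"').javaMatch("return TOKEN_STRING_LITERAL;");
            // 'emitted' is a while loop over readNextChar() guarded by the escaped flag,
            // ending with: if (currentChar=='"'){ return TOKEN_STRING_LITERAL; }
            System.out.println(emitted);
        }
    }
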
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleChar.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleChar.java
new file mode 100644
index 0000000..0e53374
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleChar.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rules;
+
+public class RuleChar implements Rule {
+
+    private char expected;
+
+    public RuleChar clone() {
+        return new RuleChar(expected);
+    }
+
+    public RuleChar(char expected) {
+        this.expected = expected;
+    }
+
+    @Override
+    public String toString() {
+        return String.valueOf(expected);
+    }
+
+    public char expectedChar() {
+        return expected;
+    }
+
+    @Override
+    public int hashCode() {
+        return (int) expected;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o == null)
+            return false;
+        if (o instanceof RuleChar) {
+            if (((RuleChar) o).expected == this.expected) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    @Override
+    public String javaAction() {
+        return "currentChar = readNextChar();";
+    }
+
+    @Override
+    public String javaMatch(String action) {
+        StringBuilder result = new StringBuilder();
+        result.append("if (currentChar=='");
+        result.append(expected);
+        result.append("'){");
+        result.append(action);
+        result.append("}");
+        return result.toString();
+    }
+}
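
RuleChar is the basic building block: javaAction() advances the input and javaMatch() wraps the continuation in an equality test on currentChar. For example (TOKEN_A is a made-up constant used only to show the emitted shape):

    import edu.uci.ics.asterix.lexergenerator.rules.RuleChar;

    public class RuleCharSketch {
        public static void main(String[] args) {
            String emitted = new RuleChar('a').javaMatch("return TOKEN_A;");
            System.out.println(emitted); // prints: if (currentChar=='a'){return TOKEN_A;}
        }
    }
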
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleDigitSequence.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleDigitSequence.java
new file mode 100644
index 0000000..13381e0
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleDigitSequence.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rules;
+
+public class RuleDigitSequence implements Rule {
+
+    public RuleDigitSequence clone() {
+        return new RuleDigitSequence();
+    }
+
+    @Override
+    public String toString() {
+        return " [0-9]+ ";
+    }
+
+    @Override
+    public int hashCode() {
+        return 1;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o == null)
+            return false;
+        if (o instanceof RuleDigitSequence) {
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    public String javaAction() {
+        return "";
+    }
+
+    @Override
+    public String javaMatch(String action) {
+        StringBuilder result = new StringBuilder();
+        result.append("if(currentChar >= '0' && currentChar<='9'){" + "\ncurrentChar = readNextChar();"
+                + "\nwhile(currentChar >= '0' && currentChar<='9'){" + "\ncurrentChar = readNextChar();" + "\n}\n");
+        result.append(action);
+        result.append("\n}");
+        return result.toString();
+    }
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleEpsilon.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleEpsilon.java
new file mode 100644
index 0000000..41b7535
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleEpsilon.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rules;
+
+public class RuleEpsilon implements Rule {
+
+    public RuleEpsilon clone() {
+        return new RuleEpsilon();
+    }
+
+    @Override
+    public String toString() {
+        return "?";
+    }
+
+    @Override
+    public int hashCode() {
+        return 0;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o == null)
+            return false;
+        if (o instanceof RuleEpsilon) {
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    public String javaAction() {
+        return "";
+    }
+
+    @Override
+    public String javaMatch(String action) {
+        StringBuilder result = new StringBuilder();
+        result.append("{").append(action).append("}");
+        return result.toString();
+    }
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RulePartial.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RulePartial.java
new file mode 100644
index 0000000..89caf4f
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RulePartial.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rules;
+
+public class RulePartial implements Rule {
+
+    private String partialName;
+
+    public RulePartial clone() {
+        return new RulePartial(partialName);
+    }
+
+    public RulePartial(String expected) {
+        this.partialName = expected;
+    }
+
+    public String getPartial() {
+        return this.partialName;
+    }
+
+    @Override
+    public String toString() {
+        return partialName;
+    }
+
+    @Override
+    public int hashCode() {
+        return (int) partialName.charAt(1);
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o == null)
+            return false;
+        if (o instanceof RulePartial) {
+            if (((RulePartial) o).partialName.equals(this.partialName)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    @Override
+    public String javaAction() {
+        return "";
+    }
+
+    @Override
+    public String javaMatch(String action) {
+        StringBuilder result = new StringBuilder();
+        result.append("if (parse_" + partialName + "(currentChar)==TOKEN_" + partialName + "){");
+        result.append(action);
+        result.append("}");
+        return result.toString();
+    }
+
+}
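
RulePartial defers matching to the auxiliary function generated for another token: the emitted code calls parse_<name>(currentChar) and runs the continuation only when that call returns the corresponding TOKEN_<name> constant (toJavaAuxFunction() in LexerNode, earlier in this diff, produces those parse_ methods, with AUX_NOT_FOUND as the failure result). A small illustration, assuming an auxiliary token named aux_digits:

    import edu.uci.ics.asterix.lexergenerator.rules.RulePartial;

    public class RulePartialSketch {
        public static void main(String[] args) {
            String emitted = new RulePartial("aux_digits").javaMatch("return TOKEN_INT_LIT;");
            // prints: if (parse_aux_digits(currentChar)==TOKEN_aux_digits){return TOKEN_INT_LIT;}
            System.out.println(emitted);
        }
    }
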
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/Lexer.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/Lexer.java
new file mode 100644
index 0000000..8cee79d
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/Lexer.java
@@ -0,0 +1,219 @@
+package [PACKAGE]; 
+
+import java.io.IOException;
+import [PACKAGE].[LEXER_NAME]Exception;
+
+public class [LEXER_NAME] {
+
+    public static final int
+        TOKEN_EOF = 0, TOKEN_AUX_NOT_FOUND = 1 [TOKENS_CONSTANTS];
+
+    // Human-readable representation of the tokens. Useful for debugging.
+    // A TOKEN_CONSTANT can be converted into its image through
+    // [LEXER_NAME].tokenKindToString(TOKEN_CONSTANT); 
+    private static final String[] tokenImage = {
+            "<EOF>", "<AUX_NOT_FOUND>" [TOKENS_IMAGES]
+          };
+    
+    private static final char EOF_CHAR = 4;
+    protected java.io.Reader inputStream;
+    protected int column;
+    protected int line;
+    protected boolean prevCharIsCR;
+    protected boolean prevCharIsLF;
+    protected char[] buffer;
+    protected int bufsize;
+    protected int bufpos;
+    protected int tokenBegin;
+    protected int endOf_USED_Buffer;
+    protected int endOf_UNUSED_Buffer;
+    protected int maxUnusedBufferSize;
+
+// ================================================================================
+//  Auxiliary functions. These parse the tokens that the grammar uses as partial/auxiliary definitions
+// ================================================================================
+
+    [LEXER_AUXFUNCTIONS]
+
+// ================================================================================
+//  Main method. Returns a TOKEN_CONSTANT
+// ================================================================================            
+            
+    public int next() throws [LEXER_NAME]Exception, IOException{
+        char currentChar = buffer[bufpos];
+        while (currentChar == ' ' || currentChar=='\t' || currentChar == '\n' || currentChar=='\r')
+            currentChar = readNextChar(); 
+        tokenBegin = bufpos;
+        if (currentChar==EOF_CHAR) return TOKEN_EOF;
+
+        [LEXER_LOGIC]
+    }
+
+// ================================================================================
+//  Public interface
+// ================================================================================
+    
+    public [LEXER_NAME](java.io.Reader stream) throws IOException{
+        reInit(stream);
+    }
+
+    public void reInit(java.io.Reader stream) throws IOException{
+        done();
+        inputStream    = stream;
+        bufsize        = 4096;
+        line           = 1;
+        column         = 0;
+        bufpos         = -1;
+        endOf_UNUSED_Buffer = bufsize;
+        endOf_USED_Buffer = 0;
+        prevCharIsCR   = false;
+        prevCharIsLF   = false;
+        buffer         = new char[bufsize];
+        tokenBegin     = -1;
+        maxUnusedBufferSize = 4096/2;
+        readNextChar();
+    }
+
+    public String getLastTokenImage() {
+        if (bufpos >= tokenBegin)
+            return new String(buffer, tokenBegin, bufpos - tokenBegin);
+          else
+            return new String(buffer, tokenBegin, bufsize - tokenBegin) +
+                                  new String(buffer, 0, bufpos);
+    }
+    
+    public static String tokenKindToString(int token) {
+        return tokenImage[token]; 
+    }
+
+    public void done(){
+        buffer = null;
+    }
+
+// ================================================================================
+//  Parse error management
+// ================================================================================    
+    
+    protected int parseError(String reason) throws [LEXER_NAME]Exception {
+        StringBuilder message = new StringBuilder();
+        message.append(reason).append("\n");
+        message.append("Line: ").append(line).append("\n");
+        message.append("Row: ").append(column).append("\n");
+        throw new [LEXER_NAME]Exception(message.toString());
+    }
+
+    protected int parseError(int ... tokens) throws [LEXER_NAME]Exception {
+        StringBuilder message = new StringBuilder();
+        message.append("Error while parsing. ");
+        message.append(" Line: ").append(line);
+        message.append(" Row: ").append(column);
+        message.append(" Expecting:");
+        for (int tokenId : tokens){
+            message.append(" ").append([LEXER_NAME].tokenKindToString(tokenId));
+        }
+        throw new [LEXER_NAME]Exception(message.toString());
+    }
+    
+    protected void updateLineColumn(char c){
+        column++;
+    
+        if (prevCharIsLF)
+        {
+            prevCharIsLF = false;
+            line += (column = 1);
+        }
+        else if (prevCharIsCR)
+        {
+            prevCharIsCR = false;
+            if (c == '\n')
+            {
+                prevCharIsLF = true;
+            }
+            else
+            {
+                line += (column = 1);
+            }
+        }
+        
+        if (c=='\r') {
+            prevCharIsCR = true;
+        } else if(c == '\n') {
+            prevCharIsLF = true;
+        }
+    }
+    
+// ================================================================================
+//  Read data, buffer management. It uses a circular (and expandable) buffer
+// ================================================================================    
+
+    protected char readNextChar() throws IOException {
+        if (++bufpos >= endOf_USED_Buffer)
+            fillBuff();
+        char c = buffer[bufpos];
+        updateLineColumn(c);
+        return c;
+    }
+
+    protected boolean fillBuff() throws IOException {
+        if (endOf_UNUSED_Buffer == endOf_USED_Buffer) // If no more unused buffer space 
+        {
+          if (endOf_UNUSED_Buffer == bufsize)         // -- If the previous unused space was
+          {                                           // -- at the end of the buffer
+            if (tokenBegin > maxUnusedBufferSize)     // -- -- If the first N bytes before
+            {                                         //       the current token are enough
+              bufpos = endOf_USED_Buffer = 0;         // -- -- -- setup buffer to use that fragment 
+              endOf_UNUSED_Buffer = tokenBegin;
+            }
+            else if (tokenBegin < 0)                  // -- -- If no token yet
+              bufpos = endOf_USED_Buffer = 0;         // -- -- -- reuse the whole buffer
+            else
+              ExpandBuff(false);                      // -- -- Otherwise expand buffer after its end
+          }
+          else if (endOf_UNUSED_Buffer > tokenBegin)  // If the endOf_UNUSED_Buffer is after the token
+            endOf_UNUSED_Buffer = bufsize;            // -- set endOf_UNUSED_Buffer to the end of the buffer
+          else if ((tokenBegin - endOf_UNUSED_Buffer) < maxUnusedBufferSize)
+          {                                           // If between endOf_UNUSED_Buffer and the token
+            ExpandBuff(true);                         // there is NOT enough space expand the buffer                          
+          }                                           // reorganizing it
+          else 
+            endOf_UNUSED_Buffer = tokenBegin;         // Otherwise there is enough space at the start
+        }                                             // so we set the buffer to use that fragment
+        int i;
+        if ((i = inputStream.read(buffer, endOf_USED_Buffer, endOf_UNUSED_Buffer - endOf_USED_Buffer)) == -1)
+        {
+            inputStream.close();
+            buffer[endOf_USED_Buffer]=(char)EOF_CHAR;
+            endOf_USED_Buffer++;
+            return false;
+        }
+            else
+                endOf_USED_Buffer += i;
+        return true;
+    }
+
+
+    protected void ExpandBuff(boolean wrapAround)
+    {
+      char[] newbuffer = new char[bufsize + maxUnusedBufferSize];
+
+      try {
+        if (wrapAround) {
+          System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
+          System.arraycopy(buffer, 0, newbuffer, bufsize - tokenBegin, bufpos);
+          buffer = newbuffer;
+          endOf_USED_Buffer = (bufpos += (bufsize - tokenBegin));
+        }
+        else {
+          System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
+          buffer = newbuffer;
+          endOf_USED_Buffer = (bufpos -= tokenBegin);
+        }
+      } catch (Throwable t) {
+          throw new Error(t.getMessage());
+      }
+
+      bufsize += maxUnusedBufferSize;
+      endOf_UNUSED_Buffer = bufsize;
+      tokenBegin = 0;
+    }    
+}
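
Lexer.java is a template: [PACKAGE], [LEXER_NAME], [TOKENS_CONSTANTS], [TOKENS_IMAGES], [LEXER_AUXFUNCTIONS] and [LEXER_LOGIC] are placeholders filled in by the generator. Driving the generated class is a plain pull loop; a minimal sketch, assuming a lexer generated with LEXER_NAME: MyLexer as in the sample default.config later in this diff:

    import java.io.StringReader;

    public class MyLexerSketch {
        public static void main(String[] args) throws Exception {
            MyLexer lexer = new MyLexer(new StringReader("hello boolean"));
            int token;
            while ((token = lexer.next()) != MyLexer.TOKEN_EOF) {
                System.out.println(MyLexer.tokenKindToString(token)
                        + " -> " + lexer.getLastTokenImage());
            }
        }
    }
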
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/LexerException.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/LexerException.java
new file mode 100644
index 0000000..76aa8a4
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/LexerException.java
@@ -0,0 +1,13 @@
+package [PACKAGE];
+
+public class [LEXER_NAME]Exception extends Exception {
+    
+    public [LEXER_NAME]Exception(String message) {
+        super(message);
+    }
+    
+    private static final long serialVersionUID = 1L;
+
+}
+
+
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/default.config b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/default.config
new file mode 100644
index 0000000..7efbeb8
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/default.config
@@ -0,0 +1,16 @@
+# LEXER GENERATOR configuration file
+# ---------------------------------------
+# Place *first* the generic configuration
+# then list your grammar.
+
+PACKAGE:          com.my.lexer
+LEXER_NAME:       MyLexer
+OUTPUT_DIR:       output
+
+TOKENS:
+
+BOOLEAN_LIT = string(boolean)
+FALSE_LIT   = string(false)
+BOMB_LIT    = string(bomb)
+BONSAI_LIT  = string(bonsai)
+HELLO_LIT   = string(hello)
\ No newline at end of file
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/Fixtures.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/Fixtures.java
new file mode 100644
index 0000000..2ed2eaa
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/Fixtures.java
@@ -0,0 +1,100 @@
+package edu.uci.ics.asterix.lexergenerator;
+
+import edu.uci.ics.asterix.lexergenerator.rules.Rule;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleChar;
+
+public class Fixtures {
+    static  String token_name       = "MYTOKEN";
+    static  String token2_name       = "MYTOKEN2";
+    static  String token_return     = "return TOKEN_MYTOKEN;\n";
+    static  String token2_return     = "return TOKEN_MYTOKEN2;\n";
+    static  String token_parseerror = "return parseError(TOKEN_MYTOKEN);\n";
+    static  String token_tostring   = "! ";
+    static  String rule_action      = "myaction";
+    static  String rule_name        = "myrule";
+    static  String rule_match       = "matchCheck("+rule_name+")";
+    static  String rule2_action     = "myaction2";
+    static  String rule2_name       = "myrule2";
+    static  String rule2_match      = "matchCheck2("+rule_name+")";
+    
+    static public Rule createRule(final String name){
+        return new Rule(){
+            String rule_name        = name;
+            String rule_action      = "myaction";
+            String rule_match       = "matchCheck("+rule_name+")";
+            
+            @Override
+            public Rule clone(){
+                return Fixtures.createRule(name+"_clone");
+            }
+            
+            @Override
+            public String javaAction() {
+                return rule_action;
+            }
+
+            @Override
+            public String javaMatch(String action) {
+                return rule_match+"{"+action+"}";
+            }
+            
+            @Override
+            public String toString(){
+                return rule_name;
+            }
+            
+        }; 
+    }
+    
+    static Rule rule = new Rule(){
+        
+        public Rule clone(){
+            return null;
+        }
+        
+        @Override
+        public String javaAction() {
+            return rule_action;
+        }
+
+        @Override
+        public String javaMatch(String action) {
+            return rule_match+"{"+action+"}";
+        }
+        
+        @Override
+        public String toString(){
+            return rule_name;
+        }
+        
+    }; 
+
+    static Rule rule2 = new Rule(){
+
+        public Rule clone(){
+            return null;
+        }
+        
+        @Override
+        public String javaAction() {
+            return rule2_action;
+        }
+
+        @Override
+        public String javaMatch(String act) {
+            return rule2_match+"{"+act+"}";
+        }
+        
+        @Override
+        public String toString(){
+            return rule2_name;
+        }
+        
+    };
+    
+    static RuleChar ruleA = new RuleChar('a');
+    static RuleChar ruleB = new RuleChar('b');
+    static RuleChar ruleC = new RuleChar('c');
+    static String ruleABC_action = "currentChar = readNextChar();";
+    
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAddRuleTest.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAddRuleTest.java
new file mode 100644
index 0000000..7541124
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAddRuleTest.java
@@ -0,0 +1,51 @@
+package edu.uci.ics.asterix.lexergenerator;
+
+import static edu.uci.ics.asterix.lexergenerator.Fixtures.*;
+import static org.junit.Assert.*;
+
+import org.junit.Test;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+
+public class LexerNodeAddRuleTest {
+
+    @Test
+    public void NodeRuleRuleNodeNode() {
+        LexerNode node = new LexerNode();
+        node.append(rule);
+        node.add(rule2);
+        node.appendTokenName(token_name);
+        assertEquals(" ( " + rule_name +token_tostring + " || " + rule2_name + token_tostring + " ) ", node.toString());
+        assertEquals(rule_match+"{"
+                        +"\n" + rule_action
+                        +"\n" +token_return
+                     +"}"
+                     +rule2_match+"{"
+                        +"\n"+rule2_action
+                        +"\n"+token_return
+                     +"}"
+                     +token_parseerror , node.toJava());
+    }
+    
+    @Test
+    public void NodeSwitchCase() {
+        LexerNode node = new LexerNode();
+        node.append(ruleA);
+        node.add(ruleB);
+        node.add(ruleC);
+        node.appendTokenName(token_name);
+        assertEquals(" ( a" + token_tostring + " || b" + token_tostring + " || c" + token_tostring + " ) ", node.toString());
+        assertEquals("switch(currentChar){\n" +
+                "case 'a':" +
+                "\n" + ruleABC_action +
+                "\n" + token_return   +
+                "case 'b':" +
+                "\n" + ruleABC_action +
+                "\n" + token_return   +
+                "case 'c':" +
+                "\n" + ruleABC_action +
+                "\n" + token_return   +
+                "}\n"+ token_parseerror , node.toJava());
+    }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAppendNodeTest.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAppendNodeTest.java
new file mode 100644
index 0000000..5151e77
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAppendNodeTest.java
@@ -0,0 +1,81 @@
+package edu.uci.ics.asterix.lexergenerator;
+
+import static edu.uci.ics.asterix.lexergenerator.Fixtures.*;
+import static org.junit.Assert.*;
+
+import org.junit.Test;
+
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleEpsilon;
+
+public class LexerNodeAppendNodeTest {
+
+    @Test
+    public void AppendIsMergeIfNoActions() throws Exception {
+        LexerNode node = new LexerNode();
+        LexerNode node2 = new LexerNode();
+        node2.append(createRule("rule"));
+        node2.appendTokenName(token_name);
+        node.append(node2);
+        assertEquals("rule_clone! ", node.toString());
+    }
+
+    @Test
+    public void AppendIsAppend() throws Exception {
+        LexerNode node = new LexerNode();
+        node.append(createRule("A"));
+        LexerNode node2 = new LexerNode();
+        node2.append(createRule("rule"));
+        node2.appendTokenName(token_name);
+        node.append(node2);
+        assertEquals("Arule_clone! ", node.toString());
+    }
+    
+    @Test
+    public void AppendedNodesAreCloned() throws Exception {
+        LexerNode node = new LexerNode();
+        node.append(createRule("A"));
+        node.appendTokenName(token_name);
+        LexerNode node2 = new LexerNode();
+        node2.append(createRule("B"));
+        node2.appendTokenName(token2_name);
+        node.append(node2);
+        // TODO
+        // assertEquals("A! B_clone! ", node.toString());
+        
+        LexerNode node3 = new LexerNode();
+        node3.append(createRule("C"));
+        node3.append(createRule("D"));
+        node3.appendTokenName(token2_name);
+        node.append(node3);
+        // TODO
+        // assertEquals("A! B_clone! C_cloneD_clone! ", node.toString());
+    }
+
+    @Test
+    public void EpsilonRuleDoesNotPropagateAppended() throws Exception {
+        LexerNode node = new LexerNode();
+        node.append(new RuleEpsilon());
+        LexerNode node2 = new LexerNode();
+        node2.append(createRule("A"));
+        node2.appendTokenName(token2_name);
+        node.append(node2);
+        assertEquals("A_clone! ", node.toString());
+    }
+
+    @Test
+    public void EpsilonRuleIsRemovedAndIssueMerge() throws Exception {
+        LexerNode node = new LexerNode();
+        node.append(new RuleEpsilon());
+        LexerNode node2 = new LexerNode();
+        node2.append(createRule("A"));
+        node2.appendTokenName(token2_name);
+        node.append(node2);
+        node.add(new RuleEpsilon());
+        node.append(node2);
+        // TODO
+        // assertEquals(" ( A_clone! A_clone!  || A_clone!  ) ", node.toString());
+    }
+    
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAppendRuleTest.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAppendRuleTest.java
new file mode 100644
index 0000000..84fd292
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAppendRuleTest.java
@@ -0,0 +1,47 @@
+package edu.uci.ics.asterix.lexergenerator;
+
+import static org.junit.Assert.*;
+
+import org.junit.Test;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import static edu.uci.ics.asterix.lexergenerator.Fixtures.*;
+
+public class LexerNodeAppendRuleTest {    
+    @Test
+    public void SingleNode() {
+        LexerNode node = new LexerNode();
+        node.appendTokenName(token_name);
+        assertEquals(token_tostring, node.toString());
+        assertEquals(token_return,   node.toJava());
+    }
+
+    @Test
+    public void NodeRuleNode() {
+        LexerNode node = new LexerNode();
+        node.append(rule);
+        node.appendTokenName(token_name);
+        assertEquals(rule_name+token_tostring, node.toString());
+        assertEquals(rule_match+"{"
+                        +"\n"+rule_action
+                        +"\n"+token_return
+                     +"}"+token_parseerror, node.toJava());
+    }
+
+    @Test
+    public void NodeRuleNodeRuleNode() {
+        LexerNode node = new LexerNode();
+        node.append(rule);
+        node.append(rule2);
+        node.appendTokenName(token_name);
+        assertEquals(rule_name+rule2_name+token_tostring, node.toString());
+        assertEquals(rule_match+"{"
+                        +"\n"+rule_action
+                        +"\n"+rule2_match+"{"
+                            +"\n"+rule2_action
+                            +"\n"+token_return
+                        +"}"
+                        +token_parseerror
+                     +"}"+token_parseerror, node.toJava());
+    }
+}
\ No newline at end of file
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAuxFunctionsTest.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAuxFunctionsTest.java
new file mode 100644
index 0000000..9f12c00
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAuxFunctionsTest.java
@@ -0,0 +1,111 @@
+package edu.uci.ics.asterix.lexergenerator;
+
+import static edu.uci.ics.asterix.lexergenerator.Fixtures.*;
+import static org.junit.Assert.*;
+
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.Set;
+
+import org.junit.Test;
+
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.Token;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleEpsilon;
+import edu.uci.ics.asterix.lexergenerator.rules.RulePartial;
+
+public class LexerNodeAuxFunctionsTest {
+    String expectedDifferentReturn = "return TOKEN_AUX_NOT_FOUND;\n"; 
+    
+    @Test
+    public void NodeRuleRuleNodeNode() {
+        LexerNode node = new LexerNode();
+        node.append(rule);
+        node.add(rule2);
+        node.appendTokenName(token_name);
+        assertEquals(" ( " + rule_name +token_tostring + " || " + rule2_name + token_tostring + " ) ", node.toString());
+        assertEquals(rule_match+"{"
+                        +"\n" + rule_action
+                        +"\n" +token_return
+                     +"}"
+                     +rule2_match+"{"
+                        +"\n"+rule2_action
+                        +"\n"+token_return
+                     +"}"
+                     +expectedDifferentReturn , node.toJavaAuxFunction());
+    }
+    
+    @Test
+    public void NodeSwitchCase() {
+        LexerNode node = new LexerNode();
+        node.append(ruleA);
+        node.add(ruleB);
+        node.add(ruleC);
+        node.appendTokenName(token_name);
+        assertEquals(" ( a" + token_tostring + " || b" + token_tostring + " || c" + token_tostring + " ) ", node.toString());
+        assertEquals("switch(currentChar){\n" +
+                "case 'a':" +
+                "\n" + ruleABC_action +
+                "\n" + token_return   +
+                "case 'b':" +
+                "\n" + ruleABC_action +
+                "\n" + token_return   +
+                "case 'c':" +
+                "\n" + ruleABC_action +
+                "\n" + token_return   +
+                "}\n"+ expectedDifferentReturn , node.toJavaAuxFunction());
+    }
+
+    @Test
+    public void NodeNeededAuxFunctions() {
+        LexerNode node = new LexerNode();
+        node.append(ruleA);
+        node.add(new RulePartial("token1"));
+        node.append(ruleC);
+        node.append(new RulePartial("token2"));
+        node.appendTokenName(token_name);
+        assertEquals(" ( actoken2!  || token1ctoken2!  ) ", node.toString());
+        Set<String> expectedNeededAuxFunctions = new HashSet<String>();
+        expectedNeededAuxFunctions.add("token1");
+        expectedNeededAuxFunctions.add("token2");
+        assertEquals(expectedNeededAuxFunctions, node.neededAuxFunctions());
+    }    
+
+    @Test(expected=Exception.class)
+    public void NodeExpandFirstActionError() throws Exception {
+        LexerNode node = new LexerNode();
+        node.append(ruleA);
+        node.add(new RulePartial("token1"));
+        node.append(ruleC);
+        node.add(new RuleEpsilon());
+        node.append(new RulePartial("token2"));
+        node.appendTokenName(token_name);
+        assertEquals(" ( actoken2!  || token1ctoken2!  || token2!  ) ", node.toString());
+        LinkedHashMap<String, Token> tokens = new LinkedHashMap<String, Token>();
+        try {
+            node.expandFirstAction(tokens);
+        } catch (Exception e) {
+            assertEquals("Cannot find a token used as part of another definition, missing token: token1", e.getMessage());
+            throw e;
+        }
+    } 
+
+    public void NodeExpandFirstAction() throws Exception {
+        LexerNode node = new LexerNode();
+        node.append(ruleA);
+        node.add(new RulePartial("token1"));
+        node.append(ruleC);
+        node.add(new RuleEpsilon());
+        node.append(new RulePartial("token2"));
+        node.appendTokenName(token_name);
+        assertEquals(" ( actoken2!  || token1ctoken2!  || token2!  ) ", node.toString());
+        LinkedHashMap<String, Token> tokens = new LinkedHashMap<String, Token>();
+        Token a = new Token("token1 = string(T1-blabla)", tokens);
+        Token b = new Token("token1 = string(T1-blabla)", tokens);
+        tokens.put("token1", a);
+        tokens.put("token2", b);
+        node.expandFirstAction(tokens);
+        assertEquals(" ( actoken2!  || T1-blablactoken2!  || T2-blabla!  ) ", node.toString());
+    }
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeCloneTest.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeCloneTest.java
new file mode 100644
index 0000000..87e3ff4
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeCloneTest.java
@@ -0,0 +1,56 @@
+package edu.uci.ics.asterix.lexergenerator;
+
+import static edu.uci.ics.asterix.lexergenerator.Fixtures.*;
+import static org.junit.Assert.*;
+
+import org.junit.Test;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+
+public class LexerNodeCloneTest {
+    
+    @Test
+    public void Depth1() throws Exception {
+        LexerNode node = new LexerNode();
+        LexerNode newNode = node.clone();
+        assertFalse(node == newNode);
+    }
+    
+    
+    @Test
+    public void Depth2() throws Exception {
+        LexerNode node = new LexerNode();
+        node.append(createRule("my1"));
+        node.add(createRule("my2"));
+        node.add(ruleA);
+        node.appendTokenName(token_name);
+        LexerNode newNode = node.clone();
+
+        assertEquals(" ( my1!  || my2!  || a!  ) ", node.toString());
+        assertEquals(" ( my1_clone!  || my2_clone!  || a!  ) ", newNode.toString());
+    }
+
+    @Test
+    public void Depth3() throws Exception {
+        LexerNode node = new LexerNode();
+        node.append(createRule("my1"));
+        node.add(createRule("my2"));
+        node.add(ruleA);
+        node.appendTokenName(token_name);
+        LexerNode node2 = new LexerNode();
+        node2.append(createRule("my3"));
+        node2.add(createRule("my4"));
+        node2.add(ruleB);
+        node2.appendTokenName(token2_name);
+        node.append(node2);
+        LexerNode newNode = node.clone();
+        // TODO
+        // assertEquals(" ( my1!  (  || my3_clone!  || my4_clone!  || b!  ) " +
+        //		     " || my2!  (  || my3_clone!  || my4_clone!  || b!  ) " +
+        //		     " || a!  (  || my3_clone!  || my4_clone!  || b!  )  ) ", node.toString());
+        // assertEquals(" ( my1_clone!  (  || my3_clone_clone!  || my4_clone_clone!  || b!  ) " +
+        //		     " || my2_clone!  (  || my3_clone_clone!  || my4_clone_clone!  || b!  ) " +
+        //		     " || a!  (  || my3_clone_clone!  || my4_clone_clone!  || b!  )  ) ", newNode.toString());
+    }
+    
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeMergeNodeTest.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeMergeNodeTest.java
new file mode 100644
index 0000000..4b22d99
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeMergeNodeTest.java
@@ -0,0 +1,83 @@
+package edu.uci.ics.asterix.lexergenerator;
+
+import static edu.uci.ics.asterix.lexergenerator.Fixtures.*;
+import static org.junit.Assert.*;
+
+import org.junit.Test;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+
+public class LexerNodeMergeNodeTest {
+
+    @Test
+    public void MergeIsAdd() throws Exception {
+        LexerNode node = new LexerNode();
+        node.append(rule);
+        LexerNode node2 = new LexerNode();
+        node2.append(rule2);
+        node2.append(rule);
+        node2.merge(node);
+        node2.appendTokenName(token_name);
+        
+        LexerNode expected = new LexerNode();
+        expected.append(rule2);
+        expected.append(rule);
+        expected.add(rule);
+        expected.appendTokenName(token_name);
+        
+        assertEquals(expected.toString(), node2.toString());
+        assertEquals(expected.toJava(), node2.toJava());
+    }
+
+    @Test
+    public void MergeTwoToken() throws Exception {
+        LexerNode node = new LexerNode();
+        node.append(rule);
+        node.appendTokenName(token_name);
+        LexerNode node2 = new LexerNode();
+        node2.append(rule2);
+        node2.appendTokenName(token2_name);
+        node.merge(node2);
+
+        assertEquals(" ( "+rule_name+token_tostring+" || "+rule2_name+token_tostring+" ) ", node.toString());
+        assertEquals(rule_match + "{"
+        		+ "\n" + rule_action
+        		+ "\n" + token_return
+        +"}"+rule2_match+"{" 
+        + "\n" + rule2_action
+        + "\n" + token2_return
+        +"}return parseError(TOKEN_MYTOKEN,TOKEN_MYTOKEN2);\n"
+, node.toJava());
+    }
+
+    @Test(expected=Exception.class)
+    public void MergeConflict() throws Exception {
+        LexerNode node = new LexerNode();
+        node.append(rule);
+        node.appendTokenName(token_name);
+        LexerNode node2 = new LexerNode();
+        node2.append(rule);
+        node2.appendTokenName(token2_name);
+        try {
+            node.merge(node2);
+        } catch (Exception e) {
+            assertEquals("Rule conflict between: "+token_name +" and "+token2_name, e.getMessage());
+            throw e;
+        }
+    }
+
+    @Test
+    public void MergeWithoutConflictWithRemoveTokensName() throws Exception {
+        LexerNode node = new LexerNode();
+        node.append(rule);
+        node.append(rule);
+        node.appendTokenName(token_name);
+        LexerNode node2 = new LexerNode();
+        node2.append(rule);
+        node2.append(rule);
+        node2.appendTokenName(token2_name);
+        node2.removeTokensName();
+        node.merge(node2);
+        assertEquals(rule_name+rule_name+token_tostring, node.toString());
+    }
+}
diff --git a/asterix-maven-plugins/pom.xml b/asterix-maven-plugins/pom.xml
new file mode 100644
index 0000000..0677ffb
--- /dev/null
+++ b/asterix-maven-plugins/pom.xml
@@ -0,0 +1,21 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.asterix</groupId>
+  <artifactId>asterix-maven-plugins</artifactId>
+  <version>0.1</version>
+  <packaging>pom</packaging>
+
+  <dependencies>
+  	<dependency>
+  		<groupId>org.apache.maven</groupId>
+  		<artifactId>maven-plugin-api</artifactId>
+  		<version>2.2.1</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  </dependencies>
+
+  <modules>
+    <module>lexer-generator-maven-plugin</module>
+  </modules>
+</project>
diff --git a/asterix-metadata/pom.xml b/asterix-metadata/pom.xml
index 459d4d0..3e8e0f9 100644
--- a/asterix-metadata/pom.xml
+++ b/asterix-metadata/pom.xml
@@ -1,4 +1,5 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 	<modelVersion>4.0.0</modelVersion>
 	<parent>
 		<artifactId>asterix</artifactId>
@@ -6,6 +7,7 @@
 		<version>0.0.4-SNAPSHOT</version>
 	</parent>
 	<artifactId>asterix-metadata</artifactId>
+
 	<build>
 		<plugins>
 			<plugin>
@@ -13,8 +15,8 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
 				</configuration>
 			</plugin>
 		</plugins>
@@ -53,12 +55,12 @@
 			<version>0.0.4-SNAPSHOT</version>
 			<scope>compile</scope>
 		</dependency>
-                <dependency>
-                        <groupId>org.apache.hadoop</groupId>
-                        <artifactId>hadoop-core</artifactId>
-                        <version>0.20.2</version>
-                        <type>jar</type>
-                        <scope>compile</scope>
-                </dependency>
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-core</artifactId>
+			<version>0.20.2</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
 	</dependencies>
 </project>
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataTransactionContext.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataTransactionContext.java
index 6c3bb5e..66c02ee 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataTransactionContext.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataTransactionContext.java
@@ -104,7 +104,7 @@
     }
 
     public void dropDataset(String dataverseName, String datasetName) {
-        Dataset dataset = new Dataset(dataverseName, datasetName, null, null, null);
+        Dataset dataset = new Dataset(dataverseName, datasetName, null, null, null, null);
         droppedCache.addDatasetIfNotExists(dataset);
         logAndApply(new MetadataLogicalOperation(dataset, false));
     }
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IAsterixStateProxy.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IAsterixStateProxy.java
index 2b915d9..5f772c7 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IAsterixStateProxy.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IAsterixStateProxy.java
@@ -32,8 +32,4 @@
     public IMetadataNode getMetadataNode() throws RemoteException;
 
     public AsterixProperties getAsterixProperties() throws RemoteException;
-
-    public Object getAsterixNodeState(String nodeName) throws RemoteException;
-
-    public void setAsterixNodeState(String nodeName, Object ns) throws RemoteException;
 }
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataEntityTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataEntityTupleTranslator.java
index df61cd9..9c9d02b 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataEntityTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataEntityTupleTranslator.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -50,5 +50,5 @@
      *            Metadata entity to be written into a tuple.
      * @throws IOException
      */
-    public ITupleReference getTupleFromMetadataEntity(T metadataEntity) throws IOException;
+    public ITupleReference getTupleFromMetadataEntity(T metadataEntity) throws MetadataException, IOException;
 }
\ No newline at end of file
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/AsterixStateProxy.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/AsterixStateProxy.java
index e2c02fa..3946fa6 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/AsterixStateProxy.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/AsterixStateProxy.java
@@ -17,7 +17,6 @@
 
 import java.rmi.RemoteException;
 import java.rmi.server.UnicastRemoteObject;
-import java.util.HashMap;
 import java.util.logging.Logger;
 
 import edu.uci.ics.asterix.metadata.api.IAsterixStateProxy;
@@ -33,7 +32,6 @@
     private IMetadataNode metadataNode;
     private AsterixProperties asterixProperties;
     private static final IAsterixStateProxy cc = new AsterixStateProxy();
-    private final HashMap<String, Object> nodeStateMap = new HashMap<String, Object>();
 
     public static IAsterixStateProxy registerRemoteObject() throws RemoteException {
         IAsterixStateProxy stub = (IAsterixStateProxy) UnicastRemoteObject.exportObject(cc, 0);
@@ -65,14 +63,4 @@
     public AsterixProperties getAsterixProperties() throws RemoteException {
         return this.asterixProperties;
     }
-
-    @Override
-    synchronized public Object getAsterixNodeState(String nodeName) throws RemoteException {
-        return nodeStateMap.get(nodeName);
-    }
-
-    @Override
-    synchronized public void setAsterixNodeState(String nodeName, Object ns) throws RemoteException {
-        nodeStateMap.put(nodeName, ns);
-    }
 }
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
index 24fe244..d7c23e0 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
@@ -18,6 +18,7 @@
 import java.io.File;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
@@ -26,16 +27,15 @@
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
 import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
 import edu.uci.ics.asterix.common.context.AsterixAppRuntimeContext;
-import edu.uci.ics.asterix.common.functions.FunctionConstants;
 import edu.uci.ics.asterix.external.adapter.factory.IAdapterFactory;
 import edu.uci.ics.asterix.external.dataset.adapter.AdapterIdentifier;
 import edu.uci.ics.asterix.metadata.IDatasetDetails;
 import edu.uci.ics.asterix.metadata.MetadataManager;
 import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
 import edu.uci.ics.asterix.metadata.api.IMetadataIndex;
-import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter;
 import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinTypeMap;
 import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter;
 import edu.uci.ics.asterix.metadata.entities.Datatype;
 import edu.uci.ics.asterix.metadata.entities.Dataverse;
 import edu.uci.ics.asterix.metadata.entities.Index;
@@ -248,7 +248,7 @@
                     primaryIndexes[i].getNodeGroupName());
             MetadataManager.INSTANCE.addDataset(mdTxnCtx, new Dataset(primaryIndexes[i].getDataverseName(),
                     primaryIndexes[i].getIndexedDatasetName(), primaryIndexes[i].getPayloadRecordType().getTypeName(),
-                    id, DatasetType.INTERNAL));
+                    id, new HashMap<String,String>(), DatasetType.INTERNAL));
         }
     }
 
@@ -328,8 +328,8 @@
 
     private static DatasourceAdapter getAdapter(String adapterFactoryClassName) throws Exception {
         String adapterName = ((IAdapterFactory) (Class.forName(adapterFactoryClassName).newInstance())).getName();
-        return new DatasourceAdapter(new AdapterIdentifier(MetadataConstants.METADATA_DATAVERSE_NAME, adapterName), adapterFactoryClassName,
-                DatasourceAdapter.AdapterType.INTERNAL);
+        return new DatasourceAdapter(new AdapterIdentifier(MetadataConstants.METADATA_DATAVERSE_NAME, adapterName),
+                adapterFactoryClassName, DatasourceAdapter.AdapterType.INTERNAL);
     }
 
     public static void createIndex(IMetadataIndex dataset) throws Exception {
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java
index 1db4886..679c2db 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -18,6 +18,8 @@
 import java.util.ArrayList;
 import java.util.List;
 
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.metadata.MetadataException;
 import edu.uci.ics.asterix.om.types.AOrderedListType;
 import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.om.types.AUnionType;
@@ -34,6 +36,7 @@
     public static ARecordType INTERNAL_DETAILS_RECORDTYPE;
     public static ARecordType EXTERNAL_DETAILS_RECORDTYPE;
     public static ARecordType FEED_DETAILS_RECORDTYPE;
+    public static ARecordType DATASET_HINTS_RECORDTYPE;
     public static ARecordType DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE;
     public static ARecordType FIELD_RECORDTYPE;
     public static ARecordType RECORD_RECORDTYPE;
@@ -48,31 +51,34 @@
     /**
      * Create all metadata record types.
      */
-    public static void init() {
+    public static void init() throws MetadataException {
         // Attention: The order of these calls is important because some types
         // depend on other types being created first.
         // These calls are one "dependency chain".
-        DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE = createDatasourceAdapterPropertiesRecordType();
-        INTERNAL_DETAILS_RECORDTYPE = createInternalDetailsRecordType();
-        EXTERNAL_DETAILS_RECORDTYPE = createExternalDetailsRecordType();
-        FEED_DETAILS_RECORDTYPE = createFeedDetailsRecordType();
+        try {
+            DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE = createPropertiesRecordType();
+            INTERNAL_DETAILS_RECORDTYPE = createInternalDetailsRecordType();
+            EXTERNAL_DETAILS_RECORDTYPE = createExternalDetailsRecordType();
+            FEED_DETAILS_RECORDTYPE = createFeedDetailsRecordType();
+            DATASET_HINTS_RECORDTYPE = createPropertiesRecordType();
+            DATASET_RECORDTYPE = createDatasetRecordType();
 
-        DATASET_RECORDTYPE = createDatasetRecordType();
+            // Starting another dependency chain.
+            FIELD_RECORDTYPE = createFieldRecordType();
+            RECORD_RECORDTYPE = createRecordTypeRecordType();
+            DERIVEDTYPE_RECORDTYPE = createDerivedTypeRecordType();
+            DATATYPE_RECORDTYPE = createDatatypeRecordType();
 
-        // Starting another dependency chain.
-        FIELD_RECORDTYPE = createFieldRecordType();
-        RECORD_RECORDTYPE = createRecordTypeRecordType();
-        DERIVEDTYPE_RECORDTYPE = createDerivedTypeRecordType();
-        DATATYPE_RECORDTYPE = createDatatypeRecordType();
-
-        // Independent of any other types.
-        DATAVERSE_RECORDTYPE = createDataverseRecordType();
-        INDEX_RECORDTYPE = createIndexRecordType();
-        NODE_RECORDTYPE = createNodeRecordType();
-        NODEGROUP_RECORDTYPE = createNodeGroupRecordType();
-        FUNCTION_RECORDTYPE = createFunctionRecordType();
-        DATASOURCE_ADAPTER_RECORDTYPE = createDatasourceAdapterRecordType();
-
+            // Independent of any other types.
+            DATAVERSE_RECORDTYPE = createDataverseRecordType();
+            INDEX_RECORDTYPE = createIndexRecordType();
+            NODE_RECORDTYPE = createNodeRecordType();
+            NODEGROUP_RECORDTYPE = createNodeGroupRecordType();
+            FUNCTION_RECORDTYPE = createFunctionRecordType();
+            DATASOURCE_ADAPTER_RECORDTYPE = createDatasourceAdapterRecordType();
+        } catch (AsterixException e) {
+            throw new MetadataException(e);
+        }
     }
 
     // Helper constants for accessing fields in an ARecord of type
@@ -81,17 +87,18 @@
     public static final int DATAVERSE_ARECORD_FORMAT_FIELD_INDEX = 1;
     public static final int DATAVERSE_ARECORD_TIMESTAMP_FIELD_INDEX = 2;
 
-    private static final ARecordType createDataverseRecordType() {
+    private static final ARecordType createDataverseRecordType() throws AsterixException {
         return new ARecordType("DataverseRecordType", new String[] { "DataverseName", "DataFormat", "Timestamp" },
                 new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING }, true);
     }
 
     // Helper constants for accessing fields in an ARecord of anonymous type
-    // external properties.
-    public static final int DATASOURCE_ADAPTER_PROPERTIES_ARECORD_NAME_FIELD_INDEX = 0;
-    public static final int DATASOURCE_ADAPTER_PROPERTIES_ARECORD_VALUE_FIELD_INDEX = 1;
+    // dataset properties.
+    // Used for dataset hints or dataset adapter properties.
+    public static final int DATASOURCE_PROPERTIES_NAME_FIELD_INDEX = 0;
+    public static final int DATASOURCE_PROPERTIES_VALUE_FIELD_INDEX = 1;
 
-    private static final ARecordType createDatasourceAdapterPropertiesRecordType() {
+    private static final ARecordType createPropertiesRecordType() throws AsterixException {
         String[] fieldNames = { "Name", "Value" };
         IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING };
         return new ARecordType(null, fieldNames, fieldTypes, true);
@@ -105,7 +112,7 @@
     public static final int INTERNAL_DETAILS_ARECORD_PRIMARYKEY_FIELD_INDEX = 3;
     public static final int INTERNAL_DETAILS_ARECORD_GROUPNAME_FIELD_INDEX = 4;
 
-    private static final ARecordType createInternalDetailsRecordType() {
+    private static final ARecordType createInternalDetailsRecordType() throws AsterixException {
         AOrderedListType olType = new AOrderedListType(BuiltinType.ASTRING, null);
         String[] fieldNames = { "FileStructure", "PartitioningStrategy", "PartitioningKey", "PrimaryKey", "GroupName" };
         IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, olType, olType, BuiltinType.ASTRING };
@@ -117,7 +124,7 @@
     public static final int EXTERNAL_DETAILS_ARECORD_DATASOURCE_ADAPTER_FIELD_INDEX = 0;
     public static final int EXTERNAL_DETAILS_ARECORD_PROPERTIES_FIELD_INDEX = 1;
 
-    private static final ARecordType createExternalDetailsRecordType() {
+    private static final ARecordType createExternalDetailsRecordType() throws AsterixException {
 
         AOrderedListType orderedPropertyListType = new AOrderedListType(DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE, null);
         String[] fieldNames = { "DatasourceAdapter", "Properties" };
@@ -135,7 +142,7 @@
     public static final int FEED_DETAILS_ARECORD_FUNCTION_FIELD_INDEX = 7;
     public static final int FEED_DETAILS_ARECORD_STATE_FIELD_INDEX = 8;
 
-    private static final ARecordType createFeedDetailsRecordType() {
+    private static final ARecordType createFeedDetailsRecordType() throws AsterixException {
         AOrderedListType orderedListType = new AOrderedListType(BuiltinType.ASTRING, null);
         AOrderedListType orderedListOfPropertiesType = new AOrderedListType(DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE,
                 null);
@@ -163,11 +170,12 @@
     public static final int DATASET_ARECORD_INTERNALDETAILS_FIELD_INDEX = 4;
     public static final int DATASET_ARECORD_EXTERNALDETAILS_FIELD_INDEX = 5;
     public static final int DATASET_ARECORD_FEEDDETAILS_FIELD_INDEX = 6;
-    public static final int DATASET_ARECORD_TIMESTAMP_FIELD_INDEX = 7;
+    public static final int DATASET_ARECORD_HINTS_FIELD_INDEX = 7;
+    public static final int DATASET_ARECORD_TIMESTAMP_FIELD_INDEX = 8;
 
-    private static final ARecordType createDatasetRecordType() {
+    private static final ARecordType createDatasetRecordType() throws AsterixException {
         String[] fieldNames = { "DataverseName", "DatasetName", "DataTypeName", "DatasetType", "InternalDetails",
-                "ExternalDetails", "FeedDetails", "Timestamp" };
+                "ExternalDetails", "FeedDetails", "Hints", "Timestamp" };
 
         List<IAType> internalRecordUnionList = new ArrayList<IAType>();
         internalRecordUnionList.add(BuiltinType.ANULL);
@@ -184,8 +192,11 @@
         feedRecordUnionList.add(FEED_DETAILS_RECORDTYPE);
         AUnionType feedRecordUnion = new AUnionType(feedRecordUnionList, null);
 
+        AUnorderedListType unorderedListOfHintsType = new AUnorderedListType(DATASET_HINTS_RECORDTYPE, null);
+
         IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
-                internalRecordUnion, externalRecordUnion, feedRecordUnion, BuiltinType.ASTRING };
+                internalRecordUnion, externalRecordUnion, feedRecordUnion, unorderedListOfHintsType,
+                BuiltinType.ASTRING };
         return new ARecordType("DatasetRecordType", fieldNames, fieldTypes, true);
     }
 
@@ -194,7 +205,7 @@
     public static final int FIELD_ARECORD_FIELDNAME_FIELD_INDEX = 0;
     public static final int FIELD_ARECORD_FIELDTYPE_FIELD_INDEX = 1;
 
-    private static final ARecordType createFieldRecordType() {
+    private static final ARecordType createFieldRecordType() throws AsterixException {
         String[] fieldNames = { "FieldName", "FieldType" };
         IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING };
         return new ARecordType(null, fieldNames, fieldTypes, true);
@@ -205,7 +216,7 @@
     public static final int RECORDTYPE_ARECORD_ISOPEN_FIELD_INDEX = 0;
     public static final int RECORDTYPE_ARECORD_FIELDS_FIELD_INDEX = 1;
 
-    private static final ARecordType createRecordTypeRecordType() {
+    private static final ARecordType createRecordTypeRecordType() throws AsterixException {
         AOrderedListType olType = new AOrderedListType(FIELD_RECORDTYPE, null);
         String[] fieldNames = { "IsOpen", "Fields" };
         IAType[] fieldTypes = { BuiltinType.ABOOLEAN, olType };
@@ -222,7 +233,7 @@
     public static final int DERIVEDTYPE_ARECORD_UNORDEREDLIST_FIELD_INDEX = 5;
     public static final int DERIVEDTYPE_ARECORD_ORDEREDLIST_FIELD_INDEX = 6;
 
-    private static final ARecordType createDerivedTypeRecordType() {
+    private static final ARecordType createDerivedTypeRecordType() throws AsterixException {
         String[] fieldNames = { "Tag", "IsAnonymous", "EnumValues", "Record", "Union", "UnorderedList", "OrderedList" };
         List<IAType> recordUnionList = new ArrayList<IAType>();
         recordUnionList.add(BuiltinType.ANULL);
@@ -251,7 +262,7 @@
     public static final int DATATYPE_ARECORD_DERIVED_FIELD_INDEX = 2;
     public static final int DATATYPE_ARECORD_TIMESTAMP_FIELD_INDEX = 3;
 
-    private static final ARecordType createDatatypeRecordType() {
+    private static final ARecordType createDatatypeRecordType() throws AsterixException {
         String[] fieldNames = { "DataverseName", "DatatypeName", "Derived", "Timestamp" };
         List<IAType> recordUnionList = new ArrayList<IAType>();
         recordUnionList.add(BuiltinType.ANULL);
@@ -271,7 +282,7 @@
     public static final int INDEX_ARECORD_ISPRIMARY_FIELD_INDEX = 5;
     public static final int INDEX_ARECORD_TIMESTAMP_FIELD_INDEX = 6;
 
-    private static final ARecordType createIndexRecordType() {
+    private static final ARecordType createIndexRecordType() throws AsterixException {
         AOrderedListType olType = new AOrderedListType(BuiltinType.ASTRING, null);
         String[] fieldNames = { "DataverseName", "DatasetName", "IndexName", "IndexStructure", "SearchKey",
                 "IsPrimary", "Timestamp" };
@@ -286,7 +297,7 @@
     public static final int NODE_ARECORD_NUMBEROFCORES_FIELD_INDEX = 1;
     public static final int NODE_ARECORD_WORKINGMEMORYSIZE_FIELD_INDEX = 2;
 
-    private static final ARecordType createNodeRecordType() {
+    private static final ARecordType createNodeRecordType() throws AsterixException {
         String[] fieldNames = { "NodeName", "NumberOfCores", "WorkingMemorySize" };
         IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.AINT32, BuiltinType.AINT32 };
         return new ARecordType("NodeRecordType", fieldNames, fieldTypes, true);
@@ -298,7 +309,7 @@
     public static final int NODEGROUP_ARECORD_NODENAMES_FIELD_INDEX = 1;
     public static final int NODEGROUP_ARECORD_TIMESTAMP_FIELD_INDEX = 2;
 
-    private static final ARecordType createNodeGroupRecordType() {
+    private static final ARecordType createNodeGroupRecordType() throws AsterixException {
         AUnorderedListType ulType = new AUnorderedListType(BuiltinType.ASTRING, null);
         String[] fieldNames = { "GroupName", "NodeNames", "Timestamp" };
         IAType[] fieldTypes = { BuiltinType.ASTRING, ulType, BuiltinType.ASTRING };
@@ -320,7 +331,7 @@
     public static final int FUNCTION_ARECORD_FUNCTION_LANGUAGE_FIELD_INDEX = 6;
     public static final int FUNCTION_ARECORD_FUNCTION_KIND_FIELD_INDEX = 7;
 
-    private static final ARecordType createFunctionRecordType() {
+    private static final ARecordType createFunctionRecordType() throws AsterixException {
 
         String[] fieldNames = { "DataverseName", "Name", "Arity", "Params", "ReturnType", "Definition", "Language",
                 "Kind" };
@@ -336,7 +347,7 @@
     public static final int DATASOURCE_ADAPTER_ARECORD_TYPE_FIELD_INDEX = 3;
     public static final int DATASOURCE_ADAPTER_ARECORD_TIMESTAMP_FIELD_INDEX = 4;
 
-    private static ARecordType createDatasourceAdapterRecordType() {
+    private static ARecordType createDatasourceAdapterRecordType() throws AsterixException {
         String[] fieldNames = { "DataverseName", "Name", "Classname", "Type", "Timestamp" };
         IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
                 BuiltinType.ASTRING };
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlDataSource.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlDataSource.java
index b7ba1a1..e4c7ba2 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlDataSource.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlDataSource.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -15,6 +15,7 @@
 
 package edu.uci.ics.asterix.metadata.declared;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
@@ -63,46 +64,54 @@
         this.id = id;
         this.dataset = dataset;
         this.datasourceType = datasourceType;
-        switch (datasourceType) {
-            case FEED:
-                initFeedDataset(itemType, dataset);
-            case INTERNAL: {
-                initInternalDataset(itemType);
-                break;
+        try {
+            switch (datasourceType) {
+                case FEED:
+                    initFeedDataset(itemType, dataset);
+                case INTERNAL: {
+                    initInternalDataset(itemType);
+                    break;
+                }
+                case EXTERNAL_FEED:
+                case EXTERNAL: {
+                    initExternalDataset(itemType);
+                    break;
+                }
+                default: {
+                    throw new IllegalArgumentException();
+                }
             }
-            case EXTERNAL_FEED:
-            case EXTERNAL: {
-                initExternalDataset(itemType);
-                break;
-            }
-            default: {
-                throw new IllegalArgumentException();
-            }
+        } catch (IOException e) {
+            throw new AlgebricksException(e);
         }
     }
 
     public AqlDataSource(AqlSourceId id, Dataset dataset, IAType itemType) throws AlgebricksException {
         this.id = id;
         this.dataset = dataset;
-        switch (dataset.getDatasetType()) {
-            case FEED:
-                initFeedDataset(itemType, dataset);
-                break;
-            case INTERNAL:
-                initInternalDataset(itemType);
-                break;
-            case EXTERNAL: {
-                initExternalDataset(itemType);
-                break;
+        try {
+            switch (dataset.getDatasetType()) {
+                case FEED:
+                    initFeedDataset(itemType, dataset);
+                    break;
+                case INTERNAL:
+                    initInternalDataset(itemType);
+                    break;
+                case EXTERNAL: {
+                    initExternalDataset(itemType);
+                    break;
+                }
+                default: {
+                    throw new IllegalArgumentException();
+                }
             }
-            default: {
-                throw new IllegalArgumentException();
-            }
+        } catch (IOException e) {
+            throw new AlgebricksException(e);
         }
     }
 
     // TODO: Seems like initFeedDataset() could simply call this method.
-    private void initInternalDataset(IAType itemType) {
+    private void initInternalDataset(IAType itemType) throws IOException {
         List<String> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
         ARecordType recordType = (ARecordType) itemType;
         int n = partitioningKeys.size();
@@ -114,7 +123,7 @@
         domain = new AsterixNodeGroupDomain(DatasetUtils.getNodegroupName(dataset));
     }
 
-    private void initFeedDataset(IAType itemType, Dataset dataset) {
+    private void initFeedDataset(IAType itemType, Dataset dataset) throws IOException {
         if (dataset.getDatasetDetails() instanceof ExternalDatasetDetails) {
             initExternalDataset(itemType);
         } else {
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
index 7a65070..9183d6f 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -882,7 +882,7 @@
                     splitsAndConstraint.first, typeTraits, comparatorFactories, fieldPermutation, indexOp,
                     new BTreeDataflowHelperFactory(), filterFactory, NoOpOperationCallbackProvider.INSTANCE, jobTxnId);
             return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(btreeInsert, splitsAndConstraint.second);
-        } catch (MetadataException e) {
+        } catch (MetadataException | IOException e) {
             throw new AlgebricksException(e);
         }
     }
@@ -949,8 +949,8 @@
                     new RTreeDataflowHelperFactory(valueProviderFactories), filterFactory,
                     NoOpOperationCallbackProvider.INSTANCE, jobTxnId);
             return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(rtreeUpdate, splitsAndConstraint.second);
-        } catch (MetadataException me) {
-            throw new AlgebricksException(me);
+        } catch (MetadataException | IOException e) {
+            throw new AlgebricksException(e);
         }
     }
 
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinTypeMap.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinTypeMap.java
index 734237e..27cf542 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinTypeMap.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinTypeMap.java
@@ -40,6 +40,7 @@
         _builtinTypeMap.put("time", BuiltinType.ATIME);
         _builtinTypeMap.put("datetime", BuiltinType.ADATETIME);
         _builtinTypeMap.put("duration", BuiltinType.ADURATION);
+        _builtinTypeMap.put("interval", BuiltinType.AINTERVAL);
         _builtinTypeMap.put("point", BuiltinType.APOINT);
         _builtinTypeMap.put("point3d", BuiltinType.APOINT3D);
         _builtinTypeMap.put("line", BuiltinType.ALINE);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java
index 976bd87..8f1e378 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java
@@ -15,6 +15,8 @@
 
 package edu.uci.ics.asterix.metadata.entities;
 
+import java.util.Map;
+
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
 import edu.uci.ics.asterix.metadata.IDatasetDetails;
 import edu.uci.ics.asterix.metadata.MetadataCache;
@@ -33,15 +35,18 @@
     // Type of items stored in this dataset.
     private final String itemTypeName;
     private final DatasetType datasetType;
-    private IDatasetDetails datasetDetails;
+    private final IDatasetDetails datasetDetails;
+    // Hints related to cardinality of the dataset, average size of tuples, etc.
+    private final Map<String, String> hints;
 
     public Dataset(String dataverseName, String datasetName, String itemTypeName, IDatasetDetails datasetDetails,
-            DatasetType datasetType) {
+            Map<String, String> hints, DatasetType datasetType) {
         this.dataverseName = dataverseName;
         this.datasetName = datasetName;
         this.itemTypeName = itemTypeName;
         this.datasetType = datasetType;
         this.datasetDetails = datasetDetails;
+        this.hints = hints;
     }
 
     public String getDataverseName() {
@@ -64,8 +69,8 @@
         return datasetDetails;
     }
 
-    public void setDatasetDetails(IDatasetDetails datasetDetails) {
-        this.datasetDetails = datasetDetails;
+    public Map<String, String> getHints() {
+        return hints;
     }
 
     @Override
@@ -77,7 +82,7 @@
     public Object dropFromCache(MetadataCache cache) {
         return cache.dropDataset(this);
     }
-    
+
     @Override
     public boolean equals(Object other) {
         if (this == other) {
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/ExternalDatasetDetails.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/ExternalDatasetDetails.java
index 07da617..18cef340 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/ExternalDatasetDetails.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/ExternalDatasetDetails.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -12,6 +12,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package edu.uci.ics.asterix.metadata.entities;
 
 import java.io.DataOutput;
@@ -22,6 +23,7 @@
 import edu.uci.ics.asterix.builders.OrderedListBuilder;
 import edu.uci.ics.asterix.builders.RecordBuilder;
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import edu.uci.ics.asterix.metadata.IDatasetDetails;
 import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
@@ -76,7 +78,8 @@
         fieldValue.reset();
         aString.setValue(this.getAdapter());
         stringSerde.serialize(aString, fieldValue.getDataOutput());
-        externalRecordBuilder.addField(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_DATASOURCE_ADAPTER_FIELD_INDEX, fieldValue);
+        externalRecordBuilder.addField(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_DATASOURCE_ADAPTER_FIELD_INDEX,
+                fieldValue);
 
         // write field 1
         listBuilder.reset((AOrderedListType) externalRecordType.getFieldTypes()[1]);
@@ -93,8 +96,8 @@
 
         try {
             externalRecordBuilder.write(out, true);
-        } catch (IOException ioe) {
-            throw new HyracksDataException(ioe);
+        } catch (IOException | AsterixException e) {
+            throw new HyracksDataException(e);
         }
 
     }
@@ -121,8 +124,8 @@
 
         try {
             propertyRecordBuilder.write(out, true);
-        } catch (IOException ioe) {
-            throw new HyracksDataException(ioe);
+        } catch (IOException | AsterixException e) {
+            throw new HyracksDataException(e);
         }
     }
 }
\ No newline at end of file
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedDatasetDetails.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedDatasetDetails.java
index 367066b..22de3d3 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedDatasetDetails.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedDatasetDetails.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -12,6 +12,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package edu.uci.ics.asterix.metadata.entities;
 
 import java.io.DataOutput;
@@ -23,6 +24,7 @@
 import edu.uci.ics.asterix.builders.OrderedListBuilder;
 import edu.uci.ics.asterix.builders.RecordBuilder;
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.common.functions.FunctionSignature;
 import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
@@ -160,8 +162,8 @@
 
         try {
             feedRecordBuilder.write(out, true);
-        } catch (IOException ioe) {
-            throw new HyracksDataException(ioe);
+        } catch (IOException | AsterixException e) {
+            throw new HyracksDataException(e);
         }
 
     }
@@ -188,8 +190,8 @@
 
         try {
             propertyRecordBuilder.write(out, true);
-        } catch (IOException ioe) {
-            throw new HyracksDataException(ioe);
+        } catch (IOException | AsterixException e) {
+            throw new HyracksDataException(e);
         }
     }
 
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java
index 2aade27..4265630 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java
@@ -123,27 +123,27 @@
         }
         throw new AlgebricksException("Could not find field " + expr + " in the schema.");
     }
-    
+
     @Override
     public int hashCode() {
-    	return indexName.hashCode() ^ datasetName.hashCode() ^ dataverseName.hashCode();
+        return indexName.hashCode() ^ datasetName.hashCode() ^ dataverseName.hashCode();
     }
-    
+
     @Override
     public boolean equals(Object other) {
-    	if (!(other instanceof Index)) {
-    		return false;
-    	}
-    	Index otherIndex = (Index) other;
-    	if (!indexName.equals(otherIndex.getIndexName())) {
-    		return false;
-    	}
-    	if (!datasetName.equals(otherIndex.getDatasetName())) {
-    		return false;
-    	}
-    	if (!dataverseName.equals(otherIndex.getDataverseName())) {
-    		return false;
-    	}
-    	return true;
+        if (!(other instanceof Index)) {
+            return false;
+        }
+        Index otherIndex = (Index) other;
+        if (!indexName.equals(otherIndex.getIndexName())) {
+            return false;
+        }
+        if (!datasetName.equals(otherIndex.getDatasetName())) {
+            return false;
+        }
+        if (!dataverseName.equals(otherIndex.getDataverseName())) {
+            return false;
+        }
+        return true;
     }
 }
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/InternalDatasetDetails.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/InternalDatasetDetails.java
index 51d154a..4267af2 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/InternalDatasetDetails.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/InternalDatasetDetails.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -12,6 +12,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package edu.uci.ics.asterix.metadata.entities;
 
 import java.io.DataOutput;
@@ -22,6 +23,7 @@
 import edu.uci.ics.asterix.builders.OrderedListBuilder;
 import edu.uci.ics.asterix.builders.RecordBuilder;
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import edu.uci.ics.asterix.metadata.IDatasetDetails;
 import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
@@ -146,8 +148,8 @@
 
         try {
             internalRecordBuilder.write(out, true);
-        } catch (IOException ioe) {
-            throw new HyracksDataException(ioe);
+        } catch (IOException | AsterixException e) {
+            throw new HyracksDataException(e);
         }
     }
 
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
index 3cc6f2f..a0aa36e 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -27,10 +27,14 @@
 import java.util.Map;
 
 import edu.uci.ics.asterix.builders.IARecordBuilder;
+import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.builders.UnorderedListBuilder;
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.common.functions.FunctionSignature;
 import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import edu.uci.ics.asterix.metadata.IDatasetDetails;
+import edu.uci.ics.asterix.metadata.MetadataException;
 import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
 import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
 import edu.uci.ics.asterix.metadata.entities.Dataset;
@@ -39,13 +43,18 @@
 import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
 import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails.FileStructure;
 import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails.PartitioningStrategy;
+import edu.uci.ics.asterix.om.base.AMutableString;
 import edu.uci.ics.asterix.om.base.ANull;
 import edu.uci.ics.asterix.om.base.AOrderedList;
 import edu.uci.ics.asterix.om.base.ARecord;
 import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.base.AUnorderedList;
 import edu.uci.ics.asterix.om.base.IACursor;
+import edu.uci.ics.asterix.om.types.AUnorderedListType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 
 /**
@@ -117,11 +126,9 @@
                 String value;
                 while (cursor.next()) {
                     ARecord field = (ARecord) cursor.get();
-                    key = ((AString) field
-                            .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_NAME_FIELD_INDEX))
+                    key = ((AString) field.getValueByPos(MetadataRecordTypes.DATASOURCE_PROPERTIES_NAME_FIELD_INDEX))
                             .getStringValue();
-                    value = ((AString) field
-                            .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_VALUE_FIELD_INDEX))
+                    value = ((AString) field.getValueByPos(MetadataRecordTypes.DATASOURCE_PROPERTIES_VALUE_FIELD_INDEX))
                             .getStringValue();
                     properties.put(key, value);
                 }
@@ -194,21 +201,20 @@
                 String value;
                 while (cursor.next()) {
                     ARecord field = (ARecord) cursor.get();
-                    key = ((AString) field
-                            .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_NAME_FIELD_INDEX))
+                    key = ((AString) field.getValueByPos(MetadataRecordTypes.DATASOURCE_PROPERTIES_NAME_FIELD_INDEX))
                             .getStringValue();
-                    value = ((AString) field
-                            .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_VALUE_FIELD_INDEX))
+                    value = ((AString) field.getValueByPos(MetadataRecordTypes.DATASOURCE_PROPERTIES_VALUE_FIELD_INDEX))
                             .getStringValue();
                     properties.put(key, value);
                 }
                 datasetDetails = new ExternalDatasetDetails(adapter, properties);
         }
-        return new Dataset(dataverseName, datasetName, typeName, datasetDetails, datasetType);
+        Map<String, String> hints = getDatasetHints(datasetRecord);
+        return new Dataset(dataverseName, datasetName, typeName, datasetDetails, hints, datasetType);
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(Dataset dataset) throws IOException {
+    public ITupleReference getTupleFromMetadataEntity(Dataset dataset) throws IOException, MetadataException {
         // write the key in the first 2 fields of the tuple
         tupleBuilder.reset();
         aString.setValue(dataset.getDataverseName());
@@ -251,13 +257,33 @@
         writeDatasetDetailsRecordType(recordBuilder, dataset, fieldValue.getDataOutput());
 
         // write field 7
+        UnorderedListBuilder listBuilder = new UnorderedListBuilder();
+        listBuilder
+                .reset((AUnorderedListType) MetadataRecordTypes.DATASET_RECORDTYPE.getFieldTypes()[MetadataRecordTypes.DATASET_ARECORD_HINTS_FIELD_INDEX]);
+        ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
+        for (Map.Entry<String, String> property : dataset.getHints().entrySet()) {
+            String name = property.getKey();
+            String value = property.getValue();
+            itemValue.reset();
+            writeDatasetHintRecord(name, value, itemValue.getDataOutput());
+            listBuilder.addItem(itemValue);
+        }
+        fieldValue.reset();
+        listBuilder.write(fieldValue.getDataOutput(), true);
+        recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_HINTS_FIELD_INDEX, fieldValue);
+
+        // write field 8
         fieldValue.reset();
         aString.setValue(Calendar.getInstance().getTime().toString());
         stringSerde.serialize(aString, fieldValue.getDataOutput());
         recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_TIMESTAMP_FIELD_INDEX, fieldValue);
 
         // write record
-        recordBuilder.write(tupleBuilder.getDataOutput(), true);
+        try {
+            recordBuilder.write(tupleBuilder.getDataOutput(), true);
+        } catch (AsterixException e) {
+            throw new MetadataException(e);
+        }
         tupleBuilder.addFieldEndOffset();
 
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
@@ -282,4 +308,49 @@
 
     }
 
+    private Map<String, String> getDatasetHints(ARecord datasetRecord) {
+        Map<String, String> hints = new HashMap<String, String>();
+        String key;
+        String value;
+        AUnorderedList list = (AUnorderedList) datasetRecord
+                .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_HINTS_FIELD_INDEX);
+        IACursor cursor = list.getCursor();
+        while (cursor.next()) {
+            ARecord field = (ARecord) cursor.get();
+            key = ((AString) field.getValueByPos(MetadataRecordTypes.DATASOURCE_PROPERTIES_NAME_FIELD_INDEX))
+                    .getStringValue();
+            value = ((AString) field.getValueByPos(MetadataRecordTypes.DATASOURCE_PROPERTIES_VALUE_FIELD_INDEX))
+                    .getStringValue();
+            hints.put(key, value);
+        }
+        return hints;
+    }
+
+    private void writeDatasetHintRecord(String name, String value, DataOutput out) throws HyracksDataException {
+        IARecordBuilder propertyRecordBuilder = new RecordBuilder();
+        ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
+        propertyRecordBuilder.reset(MetadataRecordTypes.DATASET_HINTS_RECORDTYPE);
+        AMutableString aString = new AMutableString("");
+        ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
+                .getSerializerDeserializer(BuiltinType.ASTRING);
+
+        // write field 0
+        fieldValue.reset();
+        aString.setValue(name);
+        stringSerde.serialize(aString, fieldValue.getDataOutput());
+        propertyRecordBuilder.addField(0, fieldValue);
+
+        // write field 1
+        fieldValue.reset();
+        aString.setValue(value);
+        stringSerde.serialize(aString, fieldValue.getDataOutput());
+        propertyRecordBuilder.addField(1, fieldValue);
+
+        try {
+            propertyRecordBuilder.write(out, true);
+        } catch (IOException | AsterixException ioe) {
+            throw new HyracksDataException(ioe);
+        }
+    }
+
 }
\ No newline at end of file
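
Note on the hints change above: a Dataset now carries a hints map, which DatasetTupleTranslator persists as an unordered list of name/value records (field 7) and reads back via getDatasetHints. A minimal construction sketch using the new constructor from this patch; the translator variable and the hint name/value shown are hypothetical:

    // Hypothetical round-trip through the new constructor and translator.
    Map<String, String> hints = new HashMap<String, String>();
    hints.put("CARDINALITY", "1000000");  // hypothetical hint; any name/value pair is stored as-is
    Dataset dataset = new Dataset(dataverseName, datasetName, typeName, datasetDetails, hints, datasetType);
    ITupleReference tuple = tupleTranslator.getTupleFromMetadataEntity(dataset);  // may now throw MetadataException
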
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
index 6353e99..4a5e4dcf 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2012 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -12,6 +12,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package edu.uci.ics.asterix.metadata.entitytupletranslators;
 
 import java.io.ByteArrayInputStream;
@@ -20,6 +21,7 @@
 import java.io.IOException;
 import java.util.Calendar;
 
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.external.dataset.adapter.AdapterIdentifier;
 import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import edu.uci.ics.asterix.metadata.MetadataException;
@@ -64,7 +66,8 @@
 
     private DatasourceAdapter createAdapterFromARecord(ARecord adapterRecord) {
         String dataverseName = ((AString) adapterRecord
-                .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_DATAVERSENAME_FIELD_INDEX)).getStringValue();
+                .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_DATAVERSENAME_FIELD_INDEX))
+                .getStringValue();
         String adapterName = ((AString) adapterRecord
                 .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_NAME_FIELD_INDEX)).getStringValue();
         String classname = ((AString) adapterRecord
@@ -76,7 +79,7 @@
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(DatasourceAdapter adapter) throws IOException {
+    public ITupleReference getTupleFromMetadataEntity(DatasourceAdapter adapter) throws IOException, MetadataException {
         // write the key in the first 2 fields of the tuple
         tupleBuilder.reset();
         aString.setValue(adapter.getAdapterIdentifier().getNamespace());
@@ -121,7 +124,11 @@
         recordBuilder.addField(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_TIMESTAMP_FIELD_INDEX, fieldValue);
 
         // write record
-        recordBuilder.write(tupleBuilder.getDataOutput(), true);
+        try {
+            recordBuilder.write(tupleBuilder.getDataOutput(), true);
+        } catch (AsterixException e) {
+            throw new MetadataException(e);
+        }
         tupleBuilder.addFieldEndOffset();
 
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
index d37fbc6..185f35b 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -28,6 +28,7 @@
 import edu.uci.ics.asterix.builders.IARecordBuilder;
 import edu.uci.ics.asterix.builders.OrderedListBuilder;
 import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import edu.uci.ics.asterix.metadata.MetadataException;
 import edu.uci.ics.asterix.metadata.MetadataNode;
@@ -140,8 +141,12 @@
                         fieldTypes[fieldId] = getTypeFromTypeName(dataverseName, fieldTypeName);
                         fieldId++;
                     }
-                    return new Datatype(dataverseName, datatypeName, new ARecordType(datatypeName, fieldNames,
-                            fieldTypes, isOpen), isAnonymous);
+                    try {
+                        return new Datatype(dataverseName, datatypeName, new ARecordType(datatypeName, fieldNames,
+                                fieldTypes, isOpen), isAnonymous);
+                    } catch (AsterixException e) {
+                        throw new MetadataException(e);
+                    }
                 }
                 case UNION: {
                     IACursor cursor = ((AOrderedList) derivedTypeRecord
@@ -189,7 +194,7 @@
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(Datatype dataType) throws IOException {
+    public ITupleReference getTupleFromMetadataEntity(Datatype dataType) throws IOException, MetadataException {
         // write the key in the first two fields of the tuple
         tupleBuilder.reset();
         aString.setValue(dataType.getDataverseName());
@@ -218,7 +223,11 @@
         ATypeTag tag = dataType.getDatatype().getTypeTag();
         if (isDerivedType(tag)) {
             fieldValue.reset();
-            writeDerivedTypeRecord(dataType, fieldValue.getDataOutput());
+            try {
+                writeDerivedTypeRecord(dataType, fieldValue.getDataOutput());
+            } catch (AsterixException e) {
+                throw new MetadataException(e);
+            }
             recordBuilder.addField(MetadataRecordTypes.DATATYPE_ARECORD_DERIVED_FIELD_INDEX, fieldValue);
         }
 
@@ -229,14 +238,18 @@
         recordBuilder.addField(MetadataRecordTypes.DATATYPE_ARECORD_TIMESTAMP_FIELD_INDEX, fieldValue);
 
         // write record
-        recordBuilder.write(tupleBuilder.getDataOutput(), true);
+        try {
+            recordBuilder.write(tupleBuilder.getDataOutput(), true);
+        } catch (AsterixException e) {
+            throw new MetadataException(e);
+        }
         tupleBuilder.addFieldEndOffset();
 
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
         return tuple;
     }
 
-    public void writeDerivedTypeRecord(Datatype type, DataOutput out) throws IOException {
+    private void writeDerivedTypeRecord(Datatype type, DataOutput out) throws IOException, AsterixException {
         DerivedTypeTag tag;
         IARecordBuilder derivedRecordBuilder = new RecordBuilder();
         ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
@@ -348,7 +361,7 @@
         listBuilder.write(dataOutput, true);
     }
 
-    public void writeRecordType(Datatype instance, DataOutput out) throws IOException {
+    private void writeRecordType(Datatype instance, DataOutput out) throws IOException, AsterixException {
 
         ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
         ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
index 271bf90..1e32004 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -21,7 +21,9 @@
 import java.io.IOException;
 import java.util.Calendar;
 
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.metadata.MetadataException;
 import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
 import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
 import edu.uci.ics.asterix.metadata.entities.Dataverse;
@@ -61,7 +63,7 @@
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(Dataverse instance) throws IOException {
+    public ITupleReference getTupleFromMetadataEntity(Dataverse instance) throws IOException, MetadataException {
         // write the key in the first field of the tuple
         tupleBuilder.reset();
         aString.setValue(instance.getDataverseName());
@@ -88,7 +90,11 @@
         stringSerde.serialize(aString, fieldValue.getDataOutput());
         recordBuilder.addField(MetadataRecordTypes.DATAVERSE_ARECORD_TIMESTAMP_FIELD_INDEX, fieldValue);
 
-        recordBuilder.write(tupleBuilder.getDataOutput(), true);
+        try {
+            recordBuilder.write(tupleBuilder.getDataOutput(), true);
+        } catch (AsterixException e) {
+            throw new MetadataException(e);
+        }
         tupleBuilder.addFieldEndOffset();
 
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
index 8296a22..c34bc72 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -23,7 +23,9 @@
 import java.util.List;
 
 import edu.uci.ics.asterix.builders.OrderedListBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.metadata.MetadataException;
 import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
 import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
 import edu.uci.ics.asterix.metadata.entities.Function;
@@ -48,7 +50,6 @@
     // Third key field.
     public static final int FUNCTION_FUNCTIONARITY_TUPLE_FIELD_INDEX = 2;
 
-    
     // Payload field containing serialized Function.
     public static final int FUNCTION_PAYLOAD_TUPLE_FIELD_INDEX = 3;
 
@@ -103,7 +104,7 @@
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(Function function) throws IOException {
+    public ITupleReference getTupleFromMetadataEntity(Function function) throws IOException, MetadataException {
         // write the key in the first 2 fields of the tuple
         tupleBuilder.reset();
         aString.setValue(function.getDataverseName());
@@ -178,7 +179,11 @@
         recordBuilder.addField(MetadataRecordTypes.FUNCTION_ARECORD_FUNCTION_KIND_FIELD_INDEX, fieldValue);
 
         // write record
-        recordBuilder.write(tupleBuilder.getDataOutput(), true);
+        try {
+            recordBuilder.write(tupleBuilder.getDataOutput(), true);
+        } catch (AsterixException e) {
+            throw new MetadataException(e);
+        }
         tupleBuilder.addFieldEndOffset();
 
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
index d71480f..07d7cc9 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -25,7 +25,9 @@
 
 import edu.uci.ics.asterix.builders.OrderedListBuilder;
 import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.metadata.MetadataException;
 import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
 import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
 import edu.uci.ics.asterix.metadata.entities.Index;
@@ -106,7 +108,7 @@
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(Index instance) throws IOException {
+    public ITupleReference getTupleFromMetadataEntity(Index instance) throws IOException, MetadataException {
         // write the key in the first 3 fields of the tuple
         tupleBuilder.reset();
         aString.setValue(instance.getDataverseName());
@@ -181,11 +183,19 @@
             aString.setValue(GRAM_LENGTH_FIELD_NAME);
             stringSerde.serialize(aString, nameValue.getDataOutput());
             intSerde.serialize(new AInt32(instance.getGramLength()), fieldValue.getDataOutput());
-            recordBuilder.addField(nameValue, fieldValue);
+            try {
+                recordBuilder.addField(nameValue, fieldValue);
+            } catch (AsterixException e) {
+                throw new MetadataException(e);
+            }
         }
 
         // write record
-        recordBuilder.write(tupleBuilder.getDataOutput(), true);
+        try {
+            recordBuilder.write(tupleBuilder.getDataOutput(), true);
+        } catch (AsterixException e) {
+            throw new MetadataException(e);
+        }
         tupleBuilder.addFieldEndOffset();
 
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
index da66d4b..ce72322 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -24,7 +24,9 @@
 import java.util.List;
 
 import edu.uci.ics.asterix.builders.UnorderedListBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.metadata.MetadataException;
 import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
 import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
 import edu.uci.ics.asterix.metadata.entities.NodeGroup;
@@ -79,7 +81,7 @@
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(NodeGroup instance) throws IOException {
+    public ITupleReference getTupleFromMetadataEntity(NodeGroup instance) throws IOException, MetadataException {
         // write the key in the first field of the tuple
         tupleBuilder.reset();
         aString.setValue(instance.getNodeGroupName());
@@ -114,7 +116,11 @@
         stringSerde.serialize(aString, fieldValue.getDataOutput());
         recordBuilder.addField(MetadataRecordTypes.NODEGROUP_ARECORD_TIMESTAMP_FIELD_INDEX, fieldValue);
 
-        recordBuilder.write(tupleBuilder.getDataOutput(), true);
+        try {
+            recordBuilder.write(tupleBuilder.getDataOutput(), true);
+        } catch (AsterixException e) {
+            throw new MetadataException(e);
+        }
         tupleBuilder.addFieldEndOffset();
 
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
index 8d324b4..9e276cc 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -17,7 +17,9 @@
 
 import java.io.IOException;
 
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.metadata.MetadataException;
 import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
 import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
 import edu.uci.ics.asterix.metadata.entities.Node;
@@ -80,7 +82,7 @@
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(Node instance) throws IOException {
+    public ITupleReference getTupleFromMetadataEntity(Node instance) throws IOException, MetadataException {
         // write the key in the first field of the tuple
         tupleBuilder.reset();
         aString.setValue(instance.getNodeName());
@@ -121,7 +123,11 @@
         // listBuilder.write(fieldValue.getDataOutput());
         // recordBuilder.addField(3, fieldValue);
 
-        recordBuilder.write(tupleBuilder.getDataOutput(), true);
+        try {
+            recordBuilder.write(tupleBuilder.getDataOutput(), true);
+        } catch (AsterixException e) {
+            throw new MetadataException(e);
+        }
         tupleBuilder.addFieldEndOffset();
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
         return tuple;
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/utils/DatasetUtils.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/utils/DatasetUtils.java
index cead5f2..df41c6e 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/utils/DatasetUtils.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/utils/DatasetUtils.java
@@ -1,5 +1,21 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.asterix.metadata.utils;
 
+import java.io.IOException;
 import java.util.List;
 
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
@@ -25,7 +41,12 @@
         List<String> partitioningKeys = getPartitioningKeys(dataset);
         IBinaryComparatorFactory[] bcfs = new IBinaryComparatorFactory[partitioningKeys.size()];
         for (int i = 0; i < partitioningKeys.size(); i++) {
-            IAType keyType = itemType.getFieldType(partitioningKeys.get(i));
+            IAType keyType;
+            try {
+                keyType = itemType.getFieldType(partitioningKeys.get(i));
+            } catch (IOException e) {
+                throw new AlgebricksException(e);
+            }
             bcfs[i] = comparatorFactoryProvider.getBinaryComparatorFactory(keyType, true);
         }
         return bcfs;
@@ -39,7 +60,12 @@
         List<String> partitioningKeys = getPartitioningKeys(dataset);
         IBinaryHashFunctionFactory[] bhffs = new IBinaryHashFunctionFactory[partitioningKeys.size()];
         for (int i = 0; i < partitioningKeys.size(); i++) {
-            IAType keyType = itemType.getFieldType(partitioningKeys.get(i));
+            IAType keyType;
+            try {
+                keyType = itemType.getFieldType(partitioningKeys.get(i));
+            } catch (IOException e) {
+                throw new AlgebricksException(e);
+            }
             bhffs[i] = hashFunProvider.getBinaryHashFunctionFactory(keyType);
         }
         return bhffs;
@@ -54,7 +80,12 @@
         int numKeys = partitioningKeys.size();
         ITypeTraits[] typeTraits = new ITypeTraits[numKeys + 1];
         for (int i = 0; i < numKeys; i++) {
-            IAType keyType = itemType.getFieldType(partitioningKeys.get(i));
+            IAType keyType;
+            try {
+                keyType = itemType.getFieldType(partitioningKeys.get(i));
+            } catch (IOException e) {
+                throw new AlgebricksException(e);
+            }
             typeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
         }
         typeTraits[numKeys] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(itemType);
diff --git a/asterix-om/pom.xml b/asterix-om/pom.xml
index 5368f07..8ff3f84 100644
--- a/asterix-om/pom.xml
+++ b/asterix-om/pom.xml
@@ -1,4 +1,5 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 	<modelVersion>4.0.0</modelVersion>
 	<parent>
 		<artifactId>asterix</artifactId>
@@ -6,6 +7,7 @@
 		<version>0.0.4-SNAPSHOT</version>
 	</parent>
 	<artifactId>asterix-om</artifactId>
+
 	<build>
 		<plugins>
 			<plugin>
@@ -13,8 +15,8 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
 				</configuration>
 			</plugin>
 		</plugins>
@@ -44,6 +46,6 @@
 		<dependency>
 			<groupId>edu.uci.ics.hyracks</groupId>
 			<artifactId>hyracks-storage-am-rtree</artifactId>
-			</dependency>
+		</dependency>
 	</dependencies>
 </project>
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/builders/IARecordBuilder.java b/asterix-om/src/main/java/edu/uci/ics/asterix/builders/IARecordBuilder.java
index 544b532..7cdb9a9 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/builders/IARecordBuilder.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/builders/IARecordBuilder.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -18,6 +18,7 @@
 import java.io.DataOutput;
 import java.io.IOException;
 
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.hyracks.data.std.api.IValueReference;
 
@@ -49,8 +50,10 @@
      *            The field name.
      * @param out
      *            The field value.
+     * @throws AsterixException
+     *             if the field name conflicts with a closed field name
      */
-    public void addField(IValueReference name, IValueReference value);
+    public void addField(IValueReference name, IValueReference value) throws AsterixException;
 
     /**
      * @param out
@@ -59,8 +62,10 @@
      *            Whether to write a typetag as part of the record's serialized
      *            representation.
      * @throws IOException
+     * @throws AsterixException
+     *             if any open field names conflict with each other
      */
-    public void write(DataOutput out, boolean writeTypeTag) throws IOException;
+    public void write(DataOutput out, boolean writeTypeTag) throws IOException, AsterixException;
 
     public int getFieldId(String fieldName);
 
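
Every IARecordBuilder call site now has to handle AsterixException in addition to IOException. A minimal caller sketch, assuming the same wrap-and-rethrow pattern used by the tuple translators in this patch (method and variable names are illustrative only):

    // Illustrative caller: builds one open field and serializes the record,
    // translating builder failures into HyracksDataException.
    private void writeExampleRecord(IARecordBuilder builder, ArrayBackedValueStorage fieldName,
            ArrayBackedValueStorage fieldValue, DataOutput out) throws HyracksDataException {
        try {
            builder.addField(fieldName, fieldValue); // throws AsterixException on a closed-field name clash
            builder.write(out, true);                // throws AsterixException on duplicate open field names
        } catch (IOException | AsterixException e) {
            throw new HyracksDataException(e);
        }
    }
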
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/builders/RecordBuilder.java b/asterix-om/src/main/java/edu/uci/ics/asterix/builders/RecordBuilder.java
index 03f0e20..f5d07ae 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/builders/RecordBuilder.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/builders/RecordBuilder.java
@@ -1,40 +1,46 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.asterix.builders;
 
+import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
 import java.io.DataOutput;
 import java.io.IOException;
 import java.util.Arrays;
 
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.SerializerDeserializerUtil;
 import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.om.types.ATypeTag;
 import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
 import edu.uci.ics.hyracks.data.std.api.IValueReference;
 import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.data.std.util.ByteArrayAccessibleOutputStream;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
 
 public class RecordBuilder implements IARecordBuilder {
-    private int openPartOffset;
+    private final static int DEFAULT_NUM_OPEN_FIELDS = 10;
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte RECORD_TYPE_TAG = ATypeTag.RECORD.serialize();
 
-    private ARecordType recType;
-
-    private ByteArrayOutputStream closedPartOutputStream;
-    private int[] closedPartOffsets;
-    private int numberOfClosedFields;
-    private byte[] nullBitMap;
-    private int nullBitMapSize;
-
-    private ByteArrayOutputStream openPartOutputStream;
-    private long[] openPartOffsets;
-    private long[] tempOpenPartOffsets;
-
-    private int numberOfOpenFields;
-
-    private int fieldNameHashCode;
-    private final IBinaryHashFunction utf8HashFunction;
-
-    // for write()
     private int openPartOffsetArraySize;
     private byte[] openPartOffsetArray;
     private int offsetPosition;
@@ -42,25 +48,40 @@
     private boolean isOpen;
     private boolean isNullable;
     private int numberOfSchemaFields;
-    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-    private final static byte RECORD_TYPE_TAG = ATypeTag.RECORD.serialize();
+
+    private int openPartOffset;
+    private ARecordType recType;
+
+    private final IBinaryHashFunction utf8HashFunction;
+    private final IBinaryComparator utf8Comparator;
+
+    private final ByteArrayOutputStream closedPartOutputStream;
+    private int[] closedPartOffsets;
+    private int numberOfClosedFields;
+    private byte[] nullBitMap;
+    private int nullBitMapSize;
+
+    private final ByteArrayAccessibleOutputStream openPartOutputStream;
+    private long[] openPartOffsets;
+    private int[] openFieldNameLengths;
+
+    private int numberOfOpenFields;
 
     public RecordBuilder() {
 
         this.closedPartOutputStream = new ByteArrayOutputStream();
         this.numberOfClosedFields = 0;
 
-        this.openPartOutputStream = new ByteArrayOutputStream();
-        this.openPartOffsets = new long[20];
-        this.tempOpenPartOffsets = new long[20];
-
+        this.openPartOutputStream = new ByteArrayAccessibleOutputStream();
+        this.openPartOffsets = new long[DEFAULT_NUM_OPEN_FIELDS];
+        this.openFieldNameLengths = new int[DEFAULT_NUM_OPEN_FIELDS];
         this.numberOfOpenFields = 0;
 
-        this.fieldNameHashCode = 0;
         this.utf8HashFunction = new PointableBinaryHashFunctionFactory(UTF8StringPointable.FACTORY)
                 .createBinaryHashFunction();
+        this.utf8Comparator = new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY)
+                .createBinaryComparator();
 
-        // for write()
         this.openPartOffsetArray = null;
         this.openPartOffsetArraySize = 0;
         this.offsetPosition = 0;
@@ -135,23 +156,31 @@
     }
 
     @Override
-    public void addField(IValueReference name, IValueReference value) {
+    public void addField(IValueReference name, IValueReference value) throws AsterixException {
         if (numberOfOpenFields == openPartOffsets.length) {
-            tempOpenPartOffsets = openPartOffsets;
-            openPartOffsets = new long[numberOfOpenFields + 20];
-            for (int i = 0; i < tempOpenPartOffsets.length; i++)
-                openPartOffsets[i] = tempOpenPartOffsets[i];
+            openPartOffsets = Arrays.copyOf(openPartOffsets, openPartOffsets.length + DEFAULT_NUM_OPEN_FIELDS);
+            openFieldNameLengths = Arrays.copyOf(openFieldNameLengths, openFieldNameLengths.length
+                    + DEFAULT_NUM_OPEN_FIELDS);
         }
-        fieldNameHashCode = utf8HashFunction.hash(name.getByteArray(), name.getStartOffset() + 1, name.getLength());
+        int fieldNameHashCode = utf8HashFunction.hash(name.getByteArray(), name.getStartOffset() + 1, name.getLength());
+        if (recType != null) {
+            int cFieldPos = recType.findFieldPosition(name.getByteArray(), name.getStartOffset() + 1,
+                    name.getLength() - 1);
+            if (cFieldPos >= 0) {
+                throw new AsterixException("Open field \"" + recType.getFieldNames()[cFieldPos]
+                        + "\" has the same field name as closed field at index " + cFieldPos);
+            }
+        }
         openPartOffsets[this.numberOfOpenFields] = fieldNameHashCode;
         openPartOffsets[this.numberOfOpenFields] = (openPartOffsets[numberOfOpenFields] << 32);
-        openPartOffsets[numberOfOpenFields++] += openPartOutputStream.size();
+        openPartOffsets[numberOfOpenFields] += openPartOutputStream.size();
+        openFieldNameLengths[numberOfOpenFields++] = name.getLength() - 1;
         openPartOutputStream.write(name.getByteArray(), name.getStartOffset() + 1, name.getLength() - 1);
         openPartOutputStream.write(value.getByteArray(), value.getStartOffset(), value.getLength());
     }
 
     @Override
-    public void write(DataOutput out, boolean writeTypeTag) throws IOException {
+    public void write(DataOutput out, boolean writeTypeTag) throws IOException, AsterixException {
         int h = headerSize;
         int recordLength;
         // prepare the open part
@@ -163,13 +192,27 @@
                 openPartOffsetArray = new byte[openPartOffsetArraySize];
 
             Arrays.sort(this.openPartOffsets, 0, numberOfOpenFields);
+            if (numberOfOpenFields > 1) {
+                byte[] openBytes = openPartOutputStream.getByteArray();
+                for (int i = 1; i < numberOfOpenFields; i++) {
+                    if (utf8Comparator.compare(openBytes, (int) openPartOffsets[i - 1], openFieldNameLengths[i - 1],
+                            openBytes, (int) openPartOffsets[i], openFieldNameLengths[i]) == 0) {
+                        String field = UTF8StringSerializerDeserializer.INSTANCE
+                                .deserialize(new DataInputStream(new ByteArrayInputStream(openBytes,
+                                        (int) openPartOffsets[i], openFieldNameLengths[i])));
+                        throw new AsterixException("Open fields " + (i - 1) + " and " + i
+                                + " have the same field name \"" + field + "\"");
+                    }
+                }
+            }
 
             openPartOffset = h + numberOfSchemaFields * 4 + closedPartOutputStream.size();
+            int fieldNameHashCode;
             for (int i = 0; i < numberOfOpenFields; i++) {
                 fieldNameHashCode = (int) (openPartOffsets[i] >> 32);
                 SerializerDeserializerUtil.writeIntToByteArray(openPartOffsetArray, (int) fieldNameHashCode,
                         offsetPosition);
-                int fieldOffset = (int) ((openPartOffsets[i] << 64) >> 64);
+                int fieldOffset = (int) openPartOffsets[i];
                 SerializerDeserializerUtil.writeIntToByteArray(openPartOffsetArray, fieldOffset + openPartOffset + 4
                         + openPartOffsetArraySize, offsetPosition + 4);
                 offsetPosition += 8;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/ADateOrTimeAscBinaryComparatorFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/ADateOrTimeAscBinaryComparatorFactory.java
new file mode 100644
index 0000000..464a03c
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/ADateOrTimeAscBinaryComparatorFactory.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.dataflow.data.nontagged.comparators;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class ADateOrTimeAscBinaryComparatorFactory implements IBinaryComparatorFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    public static final ADateOrTimeAscBinaryComparatorFactory INSTANCE = new ADateOrTimeAscBinaryComparatorFactory();
+
+    private ADateOrTimeAscBinaryComparatorFactory() {
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory#createBinaryComparator()
+     */
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        return new IBinaryComparator() {
+
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                int chrononTime1 = getInt(b1, s1);
+                int chrononTime2 = getInt(b2, s2);
+
+                if (chrononTime1 > chrononTime2) {
+                    return 1;
+                } else if (chrononTime1 < chrononTime2) {
+                    return -1;
+                } else {
+                    return 0;
+                }
+            }
+
+            private int getInt(byte[] bytes, int start) {
+                return ((bytes[start] & 0xff) << 24) + ((bytes[start + 1] & 0xff) << 16)
+                        + ((bytes[start + 2] & 0xff) << 8) + ((bytes[start + 3] & 0xff) << 0);
+            }
+        };
+    }
+}
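
The new comparator reads DATE/TIME chronons as 4-byte big-endian ints (see getInt above) and compares them with explicit branches. A quick sanity sketch of the expected ordering; the chronon values are arbitrary and encoded big-endian, matching that layout:

    // Illustrative check: the earlier chronon sorts before the later one.
    IBinaryComparator cmp = ADateOrTimeAscBinaryComparatorFactory.INSTANCE.createBinaryComparator();
    byte[] earlier = java.nio.ByteBuffer.allocate(4).putInt(1000).array();
    byte[] later = java.nio.ByteBuffer.allocate(4).putInt(2000).array();
    assert cmp.compare(earlier, 0, 4, later, 0, 4) < 0;
    assert cmp.compare(later, 0, 4, earlier, 0, 4) > 0;
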
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
index 881590d..0e486e7 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
@@ -24,15 +24,21 @@
     public IBinaryComparator createBinaryComparator() {
         return new IBinaryComparator() {
             final IBinaryComparator ascBoolComp = BooleanBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-            final IBinaryComparator ascIntComp = new PointableBinaryComparatorFactory(IntegerPointable.FACTORY).createBinaryComparator();
+            final IBinaryComparator ascIntComp = new PointableBinaryComparatorFactory(IntegerPointable.FACTORY)
+                    .createBinaryComparator();
             final IBinaryComparator ascLongComp = LongBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-            final IBinaryComparator ascStrComp = new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY).createBinaryComparator();
-            final IBinaryComparator ascFloatComp = new PointableBinaryComparatorFactory(FloatPointable.FACTORY).createBinaryComparator();
-            final IBinaryComparator ascDoubleComp = new PointableBinaryComparatorFactory(DoublePointable.FACTORY).createBinaryComparator();
+            final IBinaryComparator ascStrComp = new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY)
+                    .createBinaryComparator();
+            final IBinaryComparator ascFloatComp = new PointableBinaryComparatorFactory(FloatPointable.FACTORY)
+                    .createBinaryComparator();
+            final IBinaryComparator ascDoubleComp = new PointableBinaryComparatorFactory(DoublePointable.FACTORY)
+                    .createBinaryComparator();
             final IBinaryComparator ascRectangleComp = RectangleBinaryComparatorFactory.INSTANCE
                     .createBinaryComparator();
             final IBinaryComparator ascDateTimeComp = ADateTimeAscBinaryComparatorFactory.INSTANCE
                     .createBinaryComparator();
+            final IBinaryComparator ascDateOrTimeComp = ADateOrTimeAscBinaryComparatorFactory.INSTANCE
+                    .createBinaryComparator();
 
             @Override
             public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
@@ -73,6 +79,10 @@
                     case DATETIME: {
                         return ascDateTimeComp.compare(b1, s1 + 1, l1 - 1, b2, s2 + 1, l2 - 1);
                     }
+                    case TIME:
+                    case DATE: {
+                        return ascDateOrTimeComp.compare(b1, s1 + 1, l1 - 1, b2, s2 + 1, l2 - 1);
+                    }
                     default: {
                         throw new NotImplementedException("Comparison for type " + tag + " is not implemented");
                     }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AIntervalPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AIntervalPrinter.java
new file mode 100644
index 0000000..99c40b2
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AIntervalPrinter.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.dataflow.data.nontagged.printers;
+
+import java.io.PrintStream;
+
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.data.IPrinter;
+
+public class AIntervalPrinter implements IPrinter {
+
+    private static final long serialVersionUID = 1L;
+
+    public static final AIntervalPrinter INSTANCE = new AIntervalPrinter();
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.hyracks.algebricks.data.IPrinter#init()
+     */
+    @Override
+    public void init() throws AlgebricksException {
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.hyracks.algebricks.data.IPrinter#print(byte[], int, int, java.io.PrintStream)
+     */
+    @Override
+    public void print(byte[] b, int s, int l, PrintStream ps) throws AlgebricksException {
+        ps.print("interval(\"");
+
+        short typetag = AInt8SerializerDeserializer.getByte(b, s + 1 + 8 * 2);
+
+        IPrinter timeInstancePrinter;
+
+        if (typetag == ATypeTag.DATE.serialize()) {
+            timeInstancePrinter = ADatePrinter.INSTANCE;
+        } else if (typetag == ATypeTag.TIME.serialize()) {
+            timeInstancePrinter = ATimePrinter.INSTANCE;
+        } else if (typetag == ATypeTag.DATETIME.serialize()) {
+            timeInstancePrinter = ADateTimePrinter.INSTANCE;
+        } else {
+            throw new AlgebricksException("Unsupport internal time types in interval: " + typetag);
+        }
+
+        if (typetag == ATypeTag.TIME.serialize() || typetag == ATypeTag.DATE.serialize()) {
+            timeInstancePrinter.print(b, s + 1 + 4 - 1, 8, ps);
+            ps.print(", ");
+            timeInstancePrinter.print(b, s + 1 + 8 + 4 - 1, 8, ps);
+        } else {
+            timeInstancePrinter.print(b, s, 8, ps);
+            ps.print(", ");
+            timeInstancePrinter.print(b, s + 1 + 8 - 1, 8, ps);
+        }
+
+        ps.print("\")");
+    }
+}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/common/AQLJException.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AIntervalPrinterFactory.java
similarity index 61%
rename from asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/common/AQLJException.java
rename to asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AIntervalPrinterFactory.java
index 7c19a56..5500091 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/common/AQLJException.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AIntervalPrinterFactory.java
@@ -12,25 +12,19 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package edu.uci.ics.asterix.api.aqlj.common;
+package edu.uci.ics.asterix.dataflow.data.nontagged.printers;
 
-/**
- * This is the base (and currently the only) exception class for AQLJ.
- * 
- * @author zheilbron
- */
-public class AQLJException extends Exception {
+import edu.uci.ics.hyracks.algebricks.data.IPrinter;
+import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
+
+public class AIntervalPrinterFactory implements IPrinterFactory {
+
     private static final long serialVersionUID = 1L;
+    public static final AIntervalPrinterFactory INSTANCE = new AIntervalPrinterFactory();
 
-    public AQLJException(String message) {
-        super(message);
+    @Override
+    public IPrinter createPrinter() {
+        return AIntervalPrinter.INSTANCE;
     }
 
-    public AQLJException(String message, Throwable cause) {
-        super(message, cause);
-    }
-
-    public AQLJException(Throwable cause) {
-        super(cause);
-    }
 }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AObjectPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AObjectPrinter.java
index edcfc8a..478ad2c 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AObjectPrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AObjectPrinter.java
@@ -73,6 +73,10 @@
                 ADurationPrinter.INSTANCE.print(b, s, l, ps);
                 break;
             }
+            case INTERVAL: {
+                AIntervalPrinter.INSTANCE.print(b, s, l, ps);
+                break;
+            }
             case POINT: {
                 APointPrinter.INSTANCE.print(b, s, l, ps);
                 break;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateSerializerDeserializer.java
index 9bd25f4..86a9a8d 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateSerializerDeserializer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateSerializerDeserializer.java
@@ -21,7 +21,7 @@
 import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import edu.uci.ics.asterix.om.base.ADate;
 import edu.uci.ics.asterix.om.base.AMutableDate;
-import edu.uci.ics.asterix.om.base.temporal.ADateAndTimeParser;
+import edu.uci.ics.asterix.om.base.temporal.ADateParserFactory;
 import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
 import edu.uci.ics.asterix.om.base.temporal.StringCharSequenceAccessor;
 import edu.uci.ics.asterix.om.types.BuiltinType;
@@ -64,8 +64,8 @@
         long chrononTimeInMs = 0;
         try {
             StringCharSequenceAccessor charAccessor = new StringCharSequenceAccessor();
-            charAccessor.reset(date, 0);
-            chrononTimeInMs = ADateAndTimeParser.parseDatePart(charAccessor, true);
+            charAccessor.reset(date, 0, date.length());
+            chrononTimeInMs = ADateParserFactory.parseDatePart(charAccessor, true);
         } catch (Exception e) {
             throw new HyracksDataException(e);
         }
@@ -78,4 +78,8 @@
 
         dateSerde.serialize(aDate, out);
     }
+
+    public static int getChronon(byte[] byteArray, int offset) {
+        return AInt32SerializerDeserializer.getInt(byteArray, offset);
+    }
 }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateTimeSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateTimeSerializerDeserializer.java
index cb9e8e0..cedfc8d 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateTimeSerializerDeserializer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateTimeSerializerDeserializer.java
@@ -21,7 +21,8 @@
 import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import edu.uci.ics.asterix.om.base.ADateTime;
 import edu.uci.ics.asterix.om.base.AMutableDateTime;
-import edu.uci.ics.asterix.om.base.temporal.ADateAndTimeParser;
+import edu.uci.ics.asterix.om.base.temporal.ADateParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
 import edu.uci.ics.asterix.om.base.temporal.StringCharSequenceAccessor;
 import edu.uci.ics.asterix.om.types.BuiltinType;
 import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -66,7 +67,7 @@
         long chrononTimeInMs = 0;
         try {
             StringCharSequenceAccessor charAccessor = new StringCharSequenceAccessor();
-            charAccessor.reset(datetime, 0);
+            charAccessor.reset(datetime, 0, datetime.length());
 
             // +1 if it is negative (-)
             short timeOffset = (short) ((charAccessor.getCharAt(0) == '-') ? 1 : 0);
@@ -78,11 +79,11 @@
             // if extended form 11, else 9
             timeOffset += (charAccessor.getCharAt(timeOffset + 13) == ':') ? (short) (11) : (short) (9);
 
-            chrononTimeInMs = ADateAndTimeParser.parseDatePart(charAccessor, false);
+            chrononTimeInMs = ADateParserFactory.parseDatePart(charAccessor, false);
 
-            charAccessor.reset(datetime, timeOffset);
+            charAccessor.reset(datetime, timeOffset, datetime.length() - timeOffset);
 
-            chrononTimeInMs += ADateAndTimeParser.parseTimePart(charAccessor);
+            chrononTimeInMs += ATimeParserFactory.parseTimePart(charAccessor);
         } catch (Exception e) {
             throw new HyracksDataException(e);
         }
@@ -90,4 +91,8 @@
 
         datetimeSerde.serialize(aDateTime, out);
     }
+
+    public static long getChronon(byte[] data, int offset) {
+        return AInt64SerializerDeserializer.getLong(data, offset);
+    }
 }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADurationSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADurationSerializerDeserializer.java
index c3333f0..88e2ea5 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADurationSerializerDeserializer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADurationSerializerDeserializer.java
@@ -7,7 +7,7 @@
 import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import edu.uci.ics.asterix.om.base.ADuration;
 import edu.uci.ics.asterix.om.base.AMutableDuration;
-import edu.uci.ics.asterix.om.base.temporal.ADurationParser;
+import edu.uci.ics.asterix.om.base.temporal.ADurationParserFactory;
 import edu.uci.ics.asterix.om.base.temporal.StringCharSequenceAccessor;
 import edu.uci.ics.asterix.om.types.BuiltinType;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -48,12 +48,34 @@
         try {
             AMutableDuration aDuration = new AMutableDuration(0, 0);
             StringCharSequenceAccessor charAccessor = new StringCharSequenceAccessor();
-            charAccessor.reset(duration, 0);
-            ADurationParser.parse(charAccessor, aDuration);
+            charAccessor.reset(duration, 0, duration.length());
+            ADurationParserFactory.parseDuration(charAccessor, aDuration);
 
             durationSerde.serialize(aDuration, out);
         } catch (Exception e) {
             throw new HyracksDataException(e);
         }
     }
+
+    /**
+     * Get the year-month field of the duration as an integer number of months.
+     * 
+     * @param data
+     * @param offset
+     * @return
+     */
+    public static int getYearMonth(byte[] data, int offset) {
+        return AInt32SerializerDeserializer.getInt(data, offset);
+    }
+
+    /**
+     * Get the day-time field of the duration as a long integer number of milliseconds.
+     * 
+     * @param data
+     * @param offset
+     * @return
+     */
+    public static long getDayTime(byte[] data, int offset) {
+        return AInt64SerializerDeserializer.getLong(data, offset + 4);
+    }
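+
+    // Note: a serialized duration payload is a 4-byte month count followed by an 8-byte
+    // millisecond count, which is why getDayTime() reads its long value at offset + 4.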
 }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AIntervalSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AIntervalSerializerDeserializer.java
new file mode 100644
index 0000000..7c87dfa
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AIntervalSerializerDeserializer.java
@@ -0,0 +1,264 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.dataflow.data.nontagged.serde;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInterval;
+import edu.uci.ics.asterix.om.base.AMutableInterval;
+import edu.uci.ics.asterix.om.base.temporal.ADateParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.base.temporal.StringCharSequenceAccessor;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class AIntervalSerializerDeserializer implements ISerializerDeserializer<AInterval> {
+
+    private static final long serialVersionUID = 1L;
+
+    public static final AIntervalSerializerDeserializer INSTANCE = new AIntervalSerializerDeserializer();
+    @SuppressWarnings("unchecked")
+    private static final ISerializerDeserializer<AInterval> intervalSerde = AqlSerializerDeserializerProvider.INSTANCE
+            .getSerializerDeserializer(BuiltinType.AINTERVAL);
+
+    private static final String errorMessage = "Wrong input format for an interval value";
+
+    private AIntervalSerializerDeserializer() {
+    }
+
+    @Override
+    public AInterval deserialize(DataInput in) throws HyracksDataException {
+        try {
+            return new AInterval(in.readLong(), in.readLong(), in.readByte());
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
+
+    }
+
+    @Override
+    public void serialize(AInterval instance, DataOutput out) throws HyracksDataException {
+        try {
+            out.writeLong(instance.getIntervalStart());
+            out.writeLong(instance.getIntervalEnd());
+            out.writeByte(instance.getIntervalType());
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
+
+    }
+
+    public static long getIntervalStart(byte[] data, int offset) {
+        return AInt64SerializerDeserializer.getLong(data, offset);
+    }
+
+    public static long getIntervalEnd(byte[] data, int offset) {
+        return AInt64SerializerDeserializer.getLong(data, offset + 8);
+    }
+
+    public static byte getIntervalTimeType(byte[] data, int offset) {
+        return data[offset + 8 * 2];
+    }
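+
+    // Note: a serialized interval payload is two 8-byte chronons (start, end) followed by a
+    // 1-byte type tag for the bound type, i.e. 17 bytes in total (cf. AqlTypeTraitProvider).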
+
+    /**
+     * Create an interval value from two given datetime instances.
+     * 
+     * @param interval
+     * @param out
+     * @throws HyracksDataException
+     */
+    public static void parseDatetime(String interval, DataOutput out) throws HyracksDataException {
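+        // Expects two datetime strings separated by a comma, e.g.
+        // "2012-01-01T00:00:00, 2012-01-02T12:00:00" (illustrative example, not from the original change).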
+        AMutableInterval aInterval = new AMutableInterval(0l, 0l, (byte) 0);
+
+        long chrononTimeInMsStart = 0;
+        long chrononTimeInMsEnd = 0;
+        try {
+
+            StringCharSequenceAccessor charAccessor = new StringCharSequenceAccessor();
+
+            // Get the index for the comma
+            int commaIndex = interval.indexOf(',');
+            if (commaIndex < 0) {
+                throw new AlgebricksException("comma is missing for a string of interval");
+            }
+
+            int nonSpaceIndex = commaIndex - 1;
+            while (interval.charAt(nonSpaceIndex) == ' ') {
+                nonSpaceIndex--;
+            }
+
+            // Interval Start
+            charAccessor.reset(interval, 0, nonSpaceIndex + 1);
+
+            // +1 if it is negative (-)
+            short timeOffset = (short) ((charAccessor.getCharAt(0) == '-') ? 1 : 0);
+
+            if (charAccessor.getCharAt(timeOffset + 10) != 'T' && charAccessor.getCharAt(timeOffset + 8) != 'T') {
+                throw new AlgebricksException(errorMessage + ": missing T");
+            }
+
+            // if extended form 11, else 9
+            timeOffset += (charAccessor.getCharAt(timeOffset + 13) == ':') ? (short) (11) : (short) (9);
+
+            chrononTimeInMsStart = ADateParserFactory.parseDatePart(charAccessor, false);
+
+            charAccessor.reset(interval, timeOffset, nonSpaceIndex - timeOffset + 1);
+
+            chrononTimeInMsStart += ATimeParserFactory.parseTimePart(charAccessor);
+
+            // Interval End
+            nonSpaceIndex = commaIndex + 1;
+            while (interval.charAt(nonSpaceIndex) == ' ') {
+                nonSpaceIndex++;
+            }
+
+            charAccessor.reset(interval, nonSpaceIndex, interval.length() - nonSpaceIndex);
+
+            // +1 if it is negative (-)
+            timeOffset = (short) ((charAccessor.getCharAt(0) == '-') ? 1 : 0);
+
+            if (charAccessor.getCharAt(timeOffset + 10) != 'T' && charAccessor.getCharAt(timeOffset + 8) != 'T') {
+                throw new AlgebricksException(errorMessage + ": missing T");
+            }
+
+            // if extended form 11, else 9
+            timeOffset += (charAccessor.getCharAt(timeOffset + 13) == ':') ? (short) (11) : (short) (9);
+
+            chrononTimeInMsEnd = ADateParserFactory.parseDatePart(charAccessor, false);
+
+            charAccessor.reset(interval, nonSpaceIndex + timeOffset, interval.length() - nonSpaceIndex - timeOffset);
+
+            chrononTimeInMsEnd += ATimeParserFactory.parseTimePart(charAccessor);
+
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+
+        aInterval.setValue(chrononTimeInMsStart, chrononTimeInMsEnd, ATypeTag.DATETIME.serialize());
+
+        intervalSerde.serialize(aInterval, out);
+    }
+
+    public static void parseTime(String interval, DataOutput out) throws HyracksDataException {
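+        // Expects two time strings separated by a comma, e.g.
+        // "08:00:00, 17:30:00" (illustrative example, not from the original change).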
+        AMutableInterval aInterval = new AMutableInterval(0l, 0l, (byte) 0);
+
+        long chrononTimeInMsStart = 0;
+        long chrononTimeInMsEnd = 0;
+        try {
+
+            StringCharSequenceAccessor charAccessor = new StringCharSequenceAccessor();
+
+            // Get the index for the comma
+            int commaIndex = interval.indexOf(',');
+            if (commaIndex < 0) {
+                throw new AlgebricksException("comma is missing for a string of interval");
+            }
+
+            int nonSpaceIndex = commaIndex - 1;
+            while (interval.charAt(nonSpaceIndex) == ' ') {
+                nonSpaceIndex--;
+            }
+
+            // Interval Start
+            charAccessor.reset(interval, 0, nonSpaceIndex + 1);
+            chrononTimeInMsStart = ATimeParserFactory.parseTimePart(charAccessor);
+
+            if (chrononTimeInMsStart < 0) {
+                chrononTimeInMsStart += GregorianCalendarSystem.CHRONON_OF_DAY;
+            }
+
+            // Interval End
+            nonSpaceIndex = commaIndex + 1;
+            while (interval.charAt(nonSpaceIndex) == ' ') {
+                nonSpaceIndex++;
+            }
+
+            charAccessor.reset(interval, nonSpaceIndex, interval.length() - nonSpaceIndex);
+            chrononTimeInMsEnd = ATimeParserFactory.parseTimePart(charAccessor);
+
+            if (chrononTimeInMsEnd < 0) {
+                chrononTimeInMsEnd += GregorianCalendarSystem.CHRONON_OF_DAY;
+            }
+
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+
+        aInterval.setValue(chrononTimeInMsStart, chrononTimeInMsEnd, ATypeTag.TIME.serialize());
+        intervalSerde.serialize(aInterval, out);
+    }
+
+    public static void parseDate(String interval, DataOutput out) throws HyracksDataException {
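+        // Expects two date strings separated by a comma, e.g.
+        // "2012-01-01, 2012-02-01" (illustrative example, not from the original change).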
+        AMutableInterval aInterval = new AMutableInterval(0l, 0l, (byte) 0);
+
+        long chrononTimeInMsStart = 0;
+        long chrononTimeInMsEnd = 0;
+        short tempStart = 0;
+        short tempEnd = 0;
+        try {
+            StringCharSequenceAccessor charAccessor = new StringCharSequenceAccessor();
+
+            // Get the index for the comma
+            int commaIndex = interval.indexOf(',');
+            if (commaIndex < 0) {
+                throw new AlgebricksException("comma is missing for a string of interval");
+            }
+
+            int nonSpaceIndex = commaIndex - 1;
+            while (interval.charAt(nonSpaceIndex) == ' ') {
+                nonSpaceIndex--;
+            }
+
+            // Interval Start
+            charAccessor.reset(interval, 0, nonSpaceIndex + 1);
+
+            chrononTimeInMsStart = ADateParserFactory.parseDatePart(charAccessor, true);
+
+            if (chrononTimeInMsStart < 0 && chrononTimeInMsStart % GregorianCalendarSystem.CHRONON_OF_DAY != 0) {
+                tempStart = 1;
+            }
+
+            // Interval End
+            nonSpaceIndex = commaIndex + 1;
+            while (interval.charAt(nonSpaceIndex) == ' ') {
+                nonSpaceIndex++;
+            }
+
+            charAccessor.reset(interval, nonSpaceIndex, interval.length() - nonSpaceIndex);
+
+            chrononTimeInMsEnd = ADateParserFactory.parseDatePart(charAccessor, true);
+
+            if (chrononTimeInMsEnd < 0 && chrononTimeInMsEnd % GregorianCalendarSystem.CHRONON_OF_DAY != 0) {
+                tempEnd = 1;
+            }
+
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+
+        aInterval.setValue((chrononTimeInMsStart / GregorianCalendarSystem.CHRONON_OF_DAY) - tempStart,
+                (chrononTimeInMsEnd / GregorianCalendarSystem.CHRONON_OF_DAY) - tempEnd, ATypeTag.DATE.serialize());
+
+        intervalSerde.serialize(aInterval, out);
+    }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AObjectSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AObjectSerializerDeserializer.java
index 311a6bc..b5b7303 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AObjectSerializerDeserializer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AObjectSerializerDeserializer.java
@@ -15,6 +15,7 @@
 import edu.uci.ics.asterix.om.base.AInt32;
 import edu.uci.ics.asterix.om.base.AInt64;
 import edu.uci.ics.asterix.om.base.AInt8;
+import edu.uci.ics.asterix.om.base.AInterval;
 import edu.uci.ics.asterix.om.base.ALine;
 import edu.uci.ics.asterix.om.base.ANull;
 import edu.uci.ics.asterix.om.base.AOrderedList;
@@ -84,6 +85,9 @@
             case DURATION: {
                 return ADurationSerializerDeserializer.INSTANCE.deserialize(in);
             }
+            case INTERVAL: {
+                return AIntervalSerializerDeserializer.INSTANCE.deserialize(in);
+            }
             case POINT: {
                 return APointSerializerDeserializer.INSTANCE.deserialize(in);
             }
@@ -108,9 +112,9 @@
             case UNORDEREDLIST: {
                 return AUnorderedListSerializerDeserializer.SCHEMALESS_INSTANCE.deserialize(in);
             }
-                // case TYPE: {
-                // return AUnorderedListBytesConverter.INSTANCE.deserialize(in);
-                // }
+            // case TYPE: {
+            // return AUnorderedListBytesConverter.INSTANCE.deserialize(in);
+            // }
             default: {
                 throw new NotImplementedException("No serializer/deserializer implemented for type " + typeTag + " .");
             }
@@ -179,6 +183,10 @@
                 ADurationSerializerDeserializer.INSTANCE.serialize((ADuration) instance, out);
                 break;
             }
+            case INTERVAL: {
+                AIntervalSerializerDeserializer.INSTANCE.serialize((AInterval) instance, out);
+                break;
+            }
             case POINT: {
                 APointSerializerDeserializer.INSTANCE.serialize((APoint) instance, out);
                 break;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java
index 393f634..a450f27 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -21,6 +21,7 @@
 
 import edu.uci.ics.asterix.builders.IARecordBuilder;
 import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
 import edu.uci.ics.asterix.formats.nontagged.AqlBinaryHashFunctionFactoryProvider;
 import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
@@ -149,7 +150,7 @@
             } else {
                 return new ARecord(this.recordType, closedFields);
             }
-        } catch (IOException e) {
+        } catch (IOException | AsterixException e) {
             throw new HyracksDataException(e);
         }
     }
@@ -166,7 +167,7 @@
         return fields;
     }
 
-    private ARecordType mergeRecordTypes(ARecordType recType1, ARecordType recType2) {
+    private ARecordType mergeRecordTypes(ARecordType recType1, ARecordType recType2) throws AsterixException {
 
         String[] fieldNames = new String[recType1.getFieldNames().length + recType2.getFieldNames().length];
         IAType[] fieldTypes = new IAType[recType1.getFieldTypes().length + recType2.getFieldTypes().length];
@@ -199,7 +200,7 @@
             }
             try {
                 recordBuilder.write(out, false);
-            } catch (IOException e) {
+            } catch (IOException | AsterixException e) {
                 throw new HyracksDataException(e);
             }
         } else {
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ATimeSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ATimeSerializerDeserializer.java
index 8860f2a..26e8d7a 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ATimeSerializerDeserializer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ATimeSerializerDeserializer.java
@@ -7,7 +7,7 @@
 import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import edu.uci.ics.asterix.om.base.AMutableTime;
 import edu.uci.ics.asterix.om.base.ATime;
-import edu.uci.ics.asterix.om.base.temporal.ADateAndTimeParser;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
 import edu.uci.ics.asterix.om.base.temporal.StringCharSequenceAccessor;
 import edu.uci.ics.asterix.om.types.BuiltinType;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -52,8 +52,8 @@
 
         try {
             StringCharSequenceAccessor charAccessor = new StringCharSequenceAccessor();
-            charAccessor.reset(time, 0);
-            chrononTimeInMs = ADateAndTimeParser.parseTimePart(charAccessor);
+            charAccessor.reset(time, 0, time.length());
+            chrononTimeInMs = ATimeParserFactory.parseTimePart(charAccessor);
         } catch (Exception e) {
             throw new HyracksDataException(e);
         }
@@ -63,4 +63,8 @@
         timeSerde.serialize(aTime, out);
     }
 
+    public static int getChronon(byte[] byteArray, int offset) {
+        return AInt32SerializerDeserializer.getInt(byteArray, offset);
+    }
+
 }
\ No newline at end of file
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java
index 09c99b8..02f0e47 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java
@@ -2,6 +2,7 @@
 
 import java.io.Serializable;
 
+import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.ADateOrTimeAscBinaryComparatorFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.ADateTimeAscBinaryComparatorFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.AObjectAscBinaryComparatorFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.AObjectDescBinaryComparatorFactory;
@@ -120,7 +121,9 @@
                 return addOffset(RectangleBinaryComparatorFactory.INSTANCE, ascending);
             }
             case DATE:
-            case TIME:
+            case TIME: {
+                return addOffset(ADateOrTimeAscBinaryComparatorFactory.INSTANCE, ascending);
+            }
             case DATETIME: {
                 return addOffset(ADateTimeAscBinaryComparatorFactory.INSTANCE, ascending);
             }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlPrinterFactoryProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlPrinterFactoryProvider.java
index b576ce0..fe8792f 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlPrinterFactoryProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlPrinterFactoryProvider.java
@@ -1,6 +1,5 @@
 package edu.uci.ics.asterix.formats.nontagged;
 
-
 import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ABooleanPrinterFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ACirclePrinterFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ADatePrinterFactory;
@@ -12,6 +11,7 @@
 import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AInt32PrinterFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AInt64PrinterFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AInt8PrinterFactory;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AIntervalPrinterFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ALinePrinterFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ANullPrinterFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ANullableFieldPrinterFactory;
@@ -49,8 +49,8 @@
 
         if (aqlType != null) {
             switch (aqlType.getTypeTag()) {
-                // case ANYTYPE:
-                // return AAnyTypePrinterFactory.INSTANCE;
+            // case ANYTYPE:
+            // return AAnyTypePrinterFactory.INSTANCE;
                 case INT8:
                     return AInt8PrinterFactory.INSTANCE;
                 case INT16:
@@ -75,6 +75,8 @@
                     return ADateTimePrinterFactory.INSTANCE;
                 case DURATION:
                     return ADurationPrinterFactory.INSTANCE;
+                case INTERVAL:
+                    return AIntervalPrinterFactory.INSTANCE;
                 case POINT:
                     return APointPrinterFactory.INSTANCE;
                 case POINT3D:
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlSerializerDeserializerProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlSerializerDeserializerProvider.java
index 5492c5c..29e33fd 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlSerializerDeserializerProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlSerializerDeserializerProvider.java
@@ -16,6 +16,7 @@
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AIntervalSerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ALineSerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ANullSerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AObjectSerializerDeserializer;
@@ -111,6 +112,9 @@
             case DURATION: {
                 return ADurationSerializerDeserializer.INSTANCE;
             }
+            case INTERVAL: {
+                return AIntervalSerializerDeserializer.INSTANCE;
+            }
             case ORDEREDLIST: {
                 return new AOrderedListSerializerDeserializer((AOrderedListType) aqlType);
             }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlTypeTraitProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlTypeTraitProvider.java
index eac6602..a7e8e83 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlTypeTraitProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlTypeTraitProvider.java
@@ -14,6 +14,7 @@
     private static final ITypeTraits FOURBYTETYPETRAIT = new TypeTrait(4 + 1);
     private static final ITypeTraits EIGHTBYTETYPETRAIT = new TypeTrait(8 + 1);
     private static final ITypeTraits SIXTEENBYTETYPETRAIT = new TypeTrait(16 + 1);
+    private static final ITypeTraits SEVENTEENBYTETYPETRAIT = new TypeTrait(17 + 1);
     private static final ITypeTraits THIRTYTWOBYTETYPETRAIT = new TypeTrait(32 + 1);
     private static final ITypeTraits TWENTYFOURBYTETYPETRAIT = new TypeTrait(24 + 1);
 
@@ -42,6 +43,8 @@
                 return EIGHTBYTETYPETRAIT;
             case POINT:
                 return SIXTEENBYTETYPETRAIT;
+            case INTERVAL:
+                return SEVENTEENBYTETYPETRAIT;
             case POINT3D:
                 return TWENTYFOURBYTETYPETRAIT;
             case LINE:
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/ADate.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/ADate.java
index b6b22fe..0ef170e 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/ADate.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/ADate.java
@@ -33,7 +33,7 @@
      */
     protected int chrononTimeInDay;
 
-    private static long CHRONON_OF_DAY = 24 * 60 * 60 * 1000;
+    protected static long CHRONON_OF_DAY = 24 * 60 * 60 * 1000;
 
     public ADate(int chrononTimeInDay) {
         this.chrononTimeInDay = chrononTimeInDay;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AInterval.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AInterval.java
new file mode 100644
index 0000000..66a587a
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AInterval.java
@@ -0,0 +1,144 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.base;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.visitors.IOMVisitor;
+
+public class AInterval implements IAObject {
+
+    protected long intervalStart;
+    protected long intervalEnd;
+    protected byte typetag;
+
+    public AInterval(long intervalStart, long intervalEnd, byte typetag) {
+        this.intervalStart = intervalStart;
+        this.intervalEnd = intervalEnd;
+        this.typetag = typetag;
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.base.IAObject#getType()
+     */
+    @Override
+    public IAType getType() {
+        return BuiltinType.AINTERVAL;
+    }
+
+    public int compare(Object o) {
+        if (!(o instanceof AInterval)) {
+            return -1;
+        }
+        AInterval d = (AInterval) o;
+        if (d.intervalStart == this.intervalStart && d.intervalEnd == this.intervalEnd && d.typetag == this.typetag) {
+            return 0;
+        } else {
+            return -1;
+        }
+    }
+
+    public boolean equals(Object o) {
+        if (!(o instanceof AInterval)) {
+            return false;
+        } else {
+            AInterval t = (AInterval) o;
+            return (t.intervalStart == this.intervalStart && t.intervalEnd == this.intervalEnd
+                    && t.typetag == this.typetag);
+        }
+    }
+
+    @Override
+    public int hashCode() {
+        return (int) (((int) (this.intervalStart ^ (this.intervalStart >>> 32))) * 31 + (int) (this.intervalEnd ^ (this.intervalEnd >>> 32)))
+                * 31 + (int) this.typetag;
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.base.IAObject#accept(edu.uci.ics.asterix.om.visitors.IOMVisitor)
+     */
+    @Override
+    public void accept(IOMVisitor visitor) throws AsterixException {
+        visitor.visitAInterval(this);
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.base.IAObject#deepEqual(edu.uci.ics.asterix.om.base.IAObject)
+     */
+    @Override
+    public boolean deepEqual(IAObject obj) {
+        return equals(obj);
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.base.IAObject#hash()
+     */
+    @Override
+    public int hash() {
+        return hashCode();
+    }
+
+    @Override
+    public String toString() {
+        StringBuilder sbder = new StringBuilder();
+        sbder.append("AInterval: { ");
+        if (typetag == ATypeTag.DATE.serialize()) {
+            sbder.append("ADate: { ");
+            GregorianCalendarSystem.getInstance().getExtendStringRepWithTimezoneUntilField(
+                    intervalStart * ADate.CHRONON_OF_DAY, 0, sbder, GregorianCalendarSystem.Fields.YEAR,
+                    GregorianCalendarSystem.Fields.DAY);
+            sbder.append(" }, ADate: {");
+            GregorianCalendarSystem.getInstance().getExtendStringRepWithTimezoneUntilField(
+                    intervalEnd * ADate.CHRONON_OF_DAY, 0, sbder, GregorianCalendarSystem.Fields.YEAR,
+                    GregorianCalendarSystem.Fields.DAY);
+            sbder.append(" }");
+        } else if (typetag == ATypeTag.TIME.serialize()) {
+            sbder.append("ATime: { ");
+            GregorianCalendarSystem.getInstance().getExtendStringRepWithTimezoneUntilField(intervalStart, 0, sbder,
+                    GregorianCalendarSystem.Fields.HOUR, GregorianCalendarSystem.Fields.MILLISECOND);
+            sbder.append(" }, ATime: { ");
+
+            GregorianCalendarSystem.getInstance().getExtendStringRepWithTimezoneUntilField(intervalEnd, 0, sbder,
+                    GregorianCalendarSystem.Fields.HOUR, GregorianCalendarSystem.Fields.MILLISECOND);
+            sbder.append(" }");
+        } else if (typetag == ATypeTag.DATETIME.serialize()) {
+            sbder.append("ADateTime: { ");
+            GregorianCalendarSystem.getInstance().getExtendStringRepWithTimezoneUntilField(intervalStart, 0, sbder,
+                    GregorianCalendarSystem.Fields.YEAR, GregorianCalendarSystem.Fields.MILLISECOND);
+            sbder.append(" }, ADateTime: { ");
+            GregorianCalendarSystem.getInstance().getExtendStringRepWithTimezoneUntilField(intervalEnd, 0, sbder,
+                    GregorianCalendarSystem.Fields.YEAR, GregorianCalendarSystem.Fields.MILLISECOND);
+            sbder.append(" }");
+        }
+        sbder.append(" }");
+        return sbder.toString();
+    }
+
+    public long getIntervalStart() {
+        return intervalStart;
+    }
+
+    public long getIntervalEnd() {
+        return intervalEnd;
+    }
+
+    public short getIntervalType() {
+        return typetag;
+    }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AMutableInterval.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AMutableInterval.java
new file mode 100644
index 0000000..055535f
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AMutableInterval.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.base;
+
+public class AMutableInterval extends AInterval {
+
+    public AMutableInterval(long intervalStart, long intervalEnd, byte typetag) {
+        super(intervalStart, intervalEnd, typetag);
+    }
+
+    public void setValue(long intervalStart, long intervalEnd, byte typetag) {
+        this.intervalStart = intervalStart;
+        this.intervalEnd = intervalEnd;
+        this.typetag = typetag;
+    }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateAndTimeParser.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateAndTimeParser.java
deleted file mode 100644
index 30ee525..0000000
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateAndTimeParser.java
+++ /dev/null
@@ -1,264 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.om.base.temporal;
-
-public class ADateAndTimeParser {
-
-    private static final GregorianCalendarSystem gCalInstance = GregorianCalendarSystem.getInstance();
-
-    private static final String dateErrorMessage = "Wrong date format!";
-    private static final String timeErrorMessage = "Wrong time format!";
-
-    /**
-     * Parse the given char sequence as a date string, and return the milliseconds represented by the date.
-     * 
-     * @param charAccessor
-     *            accessor for the char sequence
-     * @param isDateOnly
-     *            indicating whether it is a single date string, or it is the date part of a datetime string
-     * @param errorMessage
-     * @return
-     * @throws Exception
-     */
-    public static <T> long parseDatePart(ICharSequenceAccessor<T> charAccessor, boolean isDateOnly) throws Exception {
-
-        int length = charAccessor.getLength();
-        int offset = 0;
-
-        int year = 0, month = 0, day = 0;
-        boolean positive = true;
-
-        boolean isExtendedForm = false;
-
-        if (charAccessor.getCharAt(offset) == '-') {
-            offset++;
-            positive = false;
-        }
-
-        if ((isDateOnly) && charAccessor.getCharAt(offset + 4) == '-' || (!isDateOnly)
-                && charAccessor.getCharAt(offset + 13) == ':') {
-            isExtendedForm = true;
-        }
-
-        if (isExtendedForm) {
-            if (charAccessor.getCharAt(offset + 4) != '-' || charAccessor.getCharAt(offset + 7) != '-') {
-                throw new Exception(dateErrorMessage);
-            }
-        }
-
-        // year
-        for (int i = 0; i < 4; i++) {
-            if (charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9') {
-                year = year * 10 + charAccessor.getCharAt(offset + i) - '0';
-            } else {
-                throw new Exception(dateErrorMessage);
-            }
-        }
-
-        if (year < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.YEAR.ordinal()]
-                || year > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.YEAR.ordinal()]) {
-            throw new Exception(dateErrorMessage + ": year " + year);
-        }
-
-        offset += (isExtendedForm) ? 5 : 4;
-
-        // month
-        for (int i = 0; i < 2; i++) {
-            if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
-                month = month * 10 + charAccessor.getCharAt(offset + i) - '0';
-            } else {
-                throw new Exception(dateErrorMessage);
-            }
-        }
-
-        if (month < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.MONTH.ordinal()]
-                || month > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.MONTH.ordinal()]) {
-            throw new Exception(dateErrorMessage + ": month " + month);
-        }
-        offset += (isExtendedForm) ? 3 : 2;
-
-        // day
-        for (int i = 0; i < 2; i++) {
-            if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
-                day = day * 10 + charAccessor.getCharAt(offset + i) - '0';
-            } else {
-                throw new Exception(dateErrorMessage);
-            }
-        }
-
-        if (day < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.DAY.ordinal()]
-                || day > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.DAY.ordinal()]) {
-            throw new Exception(dateErrorMessage + ": day " + day);
-        }
-
-        offset += 2;
-
-        if (!positive) {
-            year *= -1;
-        }
-
-        if (isDateOnly && length > offset) {
-            throw new Exception(dateErrorMessage);
-        }
-        return gCalInstance.getChronon(year, month, day, 0, 0, 0, 0, 0);
-    }
-
-    /**
-     * Parse the given char sequence as a time string, and return the milliseconds represented by the time.
-     * 
-     * @param charAccessor
-     * @return
-     * @throws Exception
-     */
-    public static <T> int parseTimePart(ICharSequenceAccessor<T> charAccessor) throws Exception {
-
-        int length = charAccessor.getLength();
-        int offset = 0;
-
-        int hour = 0, min = 0, sec = 0, millis = 0;
-        int timezone = 0;
-
-        boolean isExtendedForm = false;
-        if (charAccessor.getCharAt(offset + 2) == ':') {
-            isExtendedForm = true;
-        }
-
-        if (isExtendedForm && (charAccessor.getCharAt(offset + 2) != ':' || charAccessor.getCharAt(offset + 5) != ':')) {
-            throw new Exception(timeErrorMessage);
-        }
-        // hour
-        for (int i = 0; i < 2; i++) {
-            if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
-                hour = hour * 10 + charAccessor.getCharAt(offset + i) - '0';
-            } else {
-                throw new Exception(timeErrorMessage);
-            }
-        }
-
-        if (hour < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.HOUR.ordinal()]
-                || hour > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.HOUR.ordinal()]) {
-            throw new Exception(timeErrorMessage + ": hour " + hour);
-        }
-
-        offset += (isExtendedForm) ? 3 : 2;
-
-        // minute
-        for (int i = 0; i < 2; i++) {
-            if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
-                min = min * 10 + charAccessor.getCharAt(offset + i) - '0';
-            } else {
-                throw new Exception(timeErrorMessage);
-            }
-        }
-
-        if (min < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.MINUTE.ordinal()]
-                || min > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.MINUTE.ordinal()]) {
-            throw new Exception(timeErrorMessage + ": min " + min);
-        }
-
-        offset += (isExtendedForm) ? 3 : 2;
-
-        // second
-        for (int i = 0; i < 2; i++) {
-            if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
-                sec = sec * 10 + charAccessor.getCharAt(offset + i) - '0';
-            } else {
-                throw new Exception(timeErrorMessage);
-            }
-        }
-
-        if (sec < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.SECOND.ordinal()]
-                || sec > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.SECOND.ordinal()]) {
-            throw new Exception(timeErrorMessage + ": sec " + sec);
-        }
-
-        offset += 2;
-
-        if ((isExtendedForm && length > offset && charAccessor.getCharAt(offset) == '.')
-                || (!isExtendedForm && length > offset)) {
-
-            offset += (isExtendedForm) ? 1 : 0;
-            int i = 0;
-            for (; i < 3 && offset + i < length; i++) {
-                if (charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9') {
-                    millis = millis * 10 + charAccessor.getCharAt(offset + i) - '0';
-                } else {
-                    break;
-                }
-            }
-
-            offset += i;
-
-            for (; i < 3; i++) {
-                millis = millis * 10;
-            }
-
-            // error is thrown if more than three digits are seen for the millisecond part
-            if (charAccessor.getCharAt(offset) >= '0' && charAccessor.getCharAt(offset) <= '9') {
-                throw new Exception("Wrong format of time instance: too many fields for millisecond.");
-            }
-        }
-
-        if (length > offset) {
-            if (charAccessor.getCharAt(offset) != 'Z') {
-                if ((charAccessor.getCharAt(offset) != '+' && charAccessor.getCharAt(offset) != '-')
-                        || (isExtendedForm && charAccessor.getCharAt(offset + 3) != ':')) {
-                    throw new Exception(timeErrorMessage);
-                }
-
-                short timezoneHour = 0;
-                short timezoneMinute = 0;
-
-                for (int i = 0; i < 2; i++) {
-                    if ((charAccessor.getCharAt(offset + 1 + i) >= '0' && charAccessor.getCharAt(offset + 1 + i) <= '9')) {
-                        timezoneHour = (short) (timezoneHour * 10 + charAccessor.getCharAt(offset + 1 + i) - '0');
-                    } else {
-                        throw new Exception(timeErrorMessage);
-                    }
-                }
-
-                if (timezoneHour < GregorianCalendarSystem.TIMEZONE_HOUR_MIN
-                        || timezoneHour > GregorianCalendarSystem.TIMEZONE_HOUR_MAX) {
-                    throw new Exception(timeErrorMessage + ": time zone hour " + timezoneHour);
-                }
-
-                int temp_offset = (isExtendedForm) ? 1 : 0;
-
-                for (int i = 0; i < 2; i++) {
-                    if ((charAccessor.getCharAt(offset + temp_offset + 3 + i) >= '0' && charAccessor.getCharAt(offset
-                            + temp_offset + 3 + i) <= '9')) {
-                        timezoneMinute = (short) (timezoneMinute * 10
-                                + charAccessor.getCharAt(offset + temp_offset + 3 + i) - '0');
-                    } else {
-                        throw new Exception(timeErrorMessage);
-                    }
-                }
-
-                if (timezoneMinute < GregorianCalendarSystem.TIMEZONE_MIN_MIN
-                        || timezoneMinute > GregorianCalendarSystem.TIMEZONE_MIN_MAX) {
-                    throw new Exception(timeErrorMessage + ": time zone minute " + timezoneMinute);
-                }
-
-                if (charAccessor.getCharAt(offset) == '-') {
-                    timezone = (byte) -((timezoneHour * 4) + timezoneMinute / 15);
-                } else {
-                    timezone = (byte) ((timezoneHour * 4) + timezoneMinute / 15);
-                }
-            }
-        }
-
-        return gCalInstance.getChronon(hour, min, sec, millis, timezone);
-    }
-}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateParserFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateParserFactory.java
new file mode 100644
index 0000000..8192919
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateParserFactory.java
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.base.temporal;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParser;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+
+public class ADateParserFactory implements IValueParserFactory {
+
+    public static final IValueParserFactory INSTANCE = new ADateParserFactory();
+
+    private static final long serialVersionUID = 1L;
+
+    private static final String dateErrorMessage = "Wrong input format for a date value";
+
+    private ADateParserFactory() {
+
+    }
+
+    @Override
+    public IValueParser createValueParser() {
+
+        final CharArrayCharSequenceAccessor charArrayAccessor = new CharArrayCharSequenceAccessor();
+
+        return new IValueParser() {
+
+            @Override
+            public void parse(char[] buffer, int start, int length, DataOutput out) throws HyracksDataException {
+                charArrayAccessor.reset(buffer, start, length);
+                try {
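+                    // parseDatePart() returns a chronon in milliseconds; the date value itself is
+                    // stored as an int number of days since the epoch, hence the division below.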
+                    out.writeInt((int) (parseDatePart(charArrayAccessor, true) / GregorianCalendarSystem.CHRONON_OF_DAY));
+                } catch (IOException ex) {
+                    throw new HyracksDataException(ex);
+                }
+            }
+        };
+    }
+
+    /**
+     * Parse the given char sequence as a date string, and return the milliseconds represented by the date.
+     * 
+     * @param charAccessor
+     *            accessor for the char sequence
+     * @param isDateOnly
+     *            indicating whether it is a single date string, or it is the date part of a datetime string
+     * @return
+     * @throws HyracksDataException
+     */
+    public static <T> long parseDatePart(ICharSequenceAccessor<T> charAccessor, boolean isDateOnly)
+            throws HyracksDataException {
+
+        int length = charAccessor.getLength();
+        int offset = 0;
+
+        int year = 0, month = 0, day = 0;
+        boolean positive = true;
+
+        boolean isExtendedForm = false;
+
+        if (charAccessor.getCharAt(offset) == '-') {
+            offset++;
+            positive = false;
+        }
+
+        if ((isDateOnly) && charAccessor.getCharAt(offset + 4) == '-' || (!isDateOnly)
+                && charAccessor.getCharAt(offset + 13) == ':') {
+            isExtendedForm = true;
+        }
+
+        if (isExtendedForm) {
+            if (charAccessor.getCharAt(offset + 4) != '-' || charAccessor.getCharAt(offset + 7) != '-') {
+                throw new HyracksDataException("Missing dash in the date string as an extended form");
+            }
+        }
+
+        // year
+        for (int i = 0; i < 4; i++) {
+            if (charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9') {
+                year = year * 10 + charAccessor.getCharAt(offset + i) - '0';
+            } else {
+                throw new HyracksDataException("Non-numeric value in year field");
+            }
+        }
+
+        if (year < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.YEAR.ordinal()]
+                || year > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.YEAR.ordinal()]) {
+            throw new HyracksDataException(dateErrorMessage + ": year " + year);
+        }
+
+        offset += (isExtendedForm) ? 5 : 4;
+
+        // month
+        for (int i = 0; i < 2; i++) {
+            if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
+                month = month * 10 + charAccessor.getCharAt(offset + i) - '0';
+            } else {
+                throw new HyracksDataException("Non-numeric value in month field");
+            }
+        }
+
+        if (month < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.MONTH.ordinal()]
+                || month > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.MONTH.ordinal()]) {
+            throw new HyracksDataException(dateErrorMessage + ": month " + month);
+        }
+        offset += (isExtendedForm) ? 3 : 2;
+
+        // day
+        for (int i = 0; i < 2; i++) {
+            if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
+                day = day * 10 + charAccessor.getCharAt(offset + i) - '0';
+            } else {
+                throw new HyracksDataException("Non-numeric value in day field");
+            }
+        }
+
+        if (day < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.DAY.ordinal()]
+                || day > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.DAY.ordinal()]) {
+            throw new HyracksDataException(dateErrorMessage + ": day " + day);
+        }
+
+        offset += 2;
+
+        if (!positive) {
+            year *= -1;
+        }
+
+        if (isDateOnly && length > offset) {
+            throw new HyracksDataException("Too many chars for a date only value");
+        }
+        return GregorianCalendarSystem.getInstance().getChronon(year, month, day, 0, 0, 0, 0, 0);
+    }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateTimeParserFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateTimeParserFactory.java
new file mode 100644
index 0000000..2df3c3b
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateTimeParserFactory.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.base.temporal;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParser;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+
+public class ADateTimeParserFactory implements IValueParserFactory {
+
+    public static final IValueParserFactory INSTANCE = new ADateTimeParserFactory();
+
+    private static final long serialVersionUID = 1L;
+
+    private static final String dateTimeErrorMessage = "Wrong Input Format for a DateTime Value";
+
+    private ADateTimeParserFactory() {
+
+    }
+
+    @Override
+    public IValueParser createValueParser() {
+
+        final CharArrayCharSequenceAccessor charArrayAccessor = new CharArrayCharSequenceAccessor();
+
+        return new IValueParser() {
+
+            @Override
+            public void parse(char[] buffer, int start, int length, DataOutput out) throws HyracksDataException {
+                long chrononTimeInMs = 0;
+
+                charArrayAccessor.reset(buffer, start, length);
+
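+                // +1 if the datetime string starts with a negative sign (-)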
+                short timeOffset = (short) ((charArrayAccessor.getCharAt(0) == '-') ? 1 : 0);
+
+                if (charArrayAccessor.getCharAt(timeOffset + 10) != 'T'
+                        && charArrayAccessor.getCharAt(timeOffset + 8) != 'T') {
+                    throw new HyracksDataException(dateTimeErrorMessage + ": missing T");
+                }
+
+                // if extended form 11, else 9
+                timeOffset += (charArrayAccessor.getCharAt(timeOffset + 13) == ':') ? (short) (11) : (short) (9);
+
+                chrononTimeInMs = ADateParserFactory.parseDatePart(charArrayAccessor, false);
+
+                charArrayAccessor.reset(buffer, start + timeOffset, length - timeOffset);
+
+                chrononTimeInMs += ATimeParserFactory.parseTimePart(charArrayAccessor);
+
+                try {
+                    out.writeLong(chrononTimeInMs);
+                } catch (IOException ex) {
+                    throw new HyracksDataException(ex);
+                }
+            }
+        };
+    }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADurationParser.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADurationParserFactory.java
similarity index 62%
rename from asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADurationParser.java
rename to asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADurationParserFactory.java
index 5d43bba..b176061 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADurationParser.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADurationParserFactory.java
@@ -14,9 +14,45 @@
  */
 package edu.uci.ics.asterix.om.base.temporal;
 
-import edu.uci.ics.asterix.om.base.AMutableDuration;
+import java.io.DataOutput;
+import java.io.IOException;
 
-public class ADurationParser {
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParser;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+
+public class ADurationParserFactory implements IValueParserFactory {
+
+    public static final IValueParserFactory INSTANCE = new ADurationParserFactory();
+
+    private static final long serialVersionUID = 1L;
+
+    private static final String durationErrorMessage = "Wrong Input Format for a Duration Value";
+
+    private ADurationParserFactory() {
+
+    }
+
+    @Override
+    public IValueParser createValueParser() {
+        final CharArrayCharSequenceAccessor charArrayAccessor = new CharArrayCharSequenceAccessor();
+        final AMutableDuration aMutableDuration = new AMutableDuration(0, 0);
+        return new IValueParser() {
+
+            @Override
+            public void parse(char[] buffer, int start, int length, DataOutput out) throws HyracksDataException {
+                charArrayAccessor.reset(buffer, start, length);
+                parseDuration(charArrayAccessor, aMutableDuration);
+                try {
+                    out.writeInt(aMutableDuration.getMonths());
+                    out.writeLong(aMutableDuration.getMilliseconds());
+                } catch (IOException ex) {
+                    throw new HyracksDataException(ex);
+                }
+            }
+        };
+    }
 
     private enum State {
         NOTHING_READ,
@@ -30,9 +66,8 @@
         SEC;
     };
 
-    private static final String errorMessage = "This can not be an instance of duration";
-
-    public static <T> void parse(ICharSequenceAccessor<T> charAccessor, AMutableDuration aDuration) throws Exception {
+    public static <T> void parseDuration(ICharSequenceAccessor<T> charAccessor, AMutableDuration aDuration)
+            throws HyracksDataException {
 
         boolean positive = true;
         int offset = 0;
@@ -45,7 +80,7 @@
         }
 
         if (charAccessor.getCharAt(offset++) != 'P') {
-            throw new Exception(errorMessage);
+            throw new HyracksDataException(durationErrorMessage + ": Missing leading 'P'.");
         }
 
         for (; offset < charAccessor.getLength(); offset++) {
@@ -59,7 +94,7 @@
                             year = value;
                             state = State.YEAR;
                         } else {
-                            throw new Exception(errorMessage);
+                            throw new HyracksDataException(durationErrorMessage + ": wrong YEAR field.");
                         }
                         break;
                     case 'M':
@@ -68,13 +103,13 @@
                                 month = value;
                                 state = State.MONTH;
                             } else {
-                                throw new Exception(errorMessage);
+                                throw new HyracksDataException(durationErrorMessage + ": wrong MONTH field.");
                             }
                         } else if (state.compareTo(State.MIN) < 0) {
                             minute = value;
                             state = State.MIN;
                         } else {
-                            throw new Exception(errorMessage);
+                            throw new HyracksDataException(durationErrorMessage + ": wrong MIN field.");
                         }
                         break;
                     case 'D':
@@ -82,14 +117,14 @@
                             day = value;
                             state = State.DAY;
                         } else {
-                            throw new Exception(errorMessage);
+                            throw new HyracksDataException(durationErrorMessage + ": wrong DAY field.");
                         }
                         break;
                     case 'T':
                         if (state.compareTo(State.TIME) < 0) {
                             state = State.TIME;
                         } else {
-                            throw new Exception(errorMessage);
+                            throw new HyracksDataException(durationErrorMessage + ": wrong TIME field.");
                         }
                         break;
 
@@ -98,7 +133,7 @@
                             hour = value;
                             state = State.HOUR;
                         } else {
-                            throw new Exception(errorMessage);
+                            throw new HyracksDataException(durationErrorMessage + ": wrong HOUR field.");
                         }
                         break;
                     case '.':
@@ -110,7 +145,8 @@
                                     if (i < 4) {
                                         millisecond = millisecond * 10 + (charAccessor.getCharAt(offset + i) - '0');
                                     } else {
-                                        throw new Exception(errorMessage);
+                                        throw new HyracksDataException(durationErrorMessage
+                                                + ": wrong MILLISECOND field.");
                                     }
                                 } else {
                                     break;
@@ -119,18 +155,18 @@
                             offset += i;
                             state = State.MILLISEC;
                         } else {
-                            throw new Exception(errorMessage);
+                            throw new HyracksDataException(durationErrorMessage + ": wrong MILLISECOND field.");
                         }
                     case 'S':
                         if (state.compareTo(State.SEC) < 0) {
                             second = value;
                             state = State.SEC;
                         } else {
-                            throw new Exception(errorMessage);
+                            throw new HyracksDataException(durationErrorMessage + ": wrong SECOND field.");
                         }
                         break;
                     default:
-                        throw new Exception(errorMessage);
+                        throw new HyracksDataException(durationErrorMessage + ": wrong format for duration.");
 
                 }
                 value = 0;
@@ -138,7 +174,7 @@
         }
 
         if (state.compareTo(State.TIME) == 0) {
-            throw new Exception(errorMessage);
+            throw new HyracksDataException(durationErrorMessage + ": no time fields after time separator.");
         }
 
         short temp = 1;
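
A small sketch of calling the refactored static entry point directly, using the CharArrayCharSequenceAccessor introduced elsewhere in this patch; the sketch class name and the ISO 8601 duration literal are illustrative only:

    import edu.uci.ics.asterix.om.base.AMutableDuration;
    import edu.uci.ics.asterix.om.base.temporal.ADurationParserFactory;
    import edu.uci.ics.asterix.om.base.temporal.CharArrayCharSequenceAccessor;

    public class DurationParseSketch {
        public static void main(String[] args) throws Exception {
            // "P1Y2M3DT4H5M6.789S": 1 year 2 months, plus 3 days 4 hours 5 minutes 6.789 seconds.
            char[] text = "P1Y2M3DT4H5M6.789S".toCharArray();
            CharArrayCharSequenceAccessor accessor = new CharArrayCharSequenceAccessor();
            accessor.reset(text, 0, text.length);
            AMutableDuration duration = new AMutableDuration(0, 0);
            ADurationParserFactory.parseDuration(accessor, duration);
            // The year-month part is kept in months, the day-time part in milliseconds.
            System.out.println(duration.getMonths() + " months, " + duration.getMilliseconds() + " ms");
        }
    }
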
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ATimeParserFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ATimeParserFactory.java
new file mode 100644
index 0000000..d76f41d
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ATimeParserFactory.java
@@ -0,0 +1,217 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.base.temporal;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParser;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+
+public class ATimeParserFactory implements IValueParserFactory {
+
+    public static final IValueParserFactory INSTANCE = new ATimeParserFactory();
+
+    private static final long serialVersionUID = 1L;
+
+    private static final String timeErrorMessage = "Wrong Input Format for a Time Value";
+
+    private ATimeParserFactory() {
+
+    }
+
+    @Override
+    public IValueParser createValueParser() {
+
+        final CharArrayCharSequenceAccessor charArrayAccessor = new CharArrayCharSequenceAccessor();
+
+        return new IValueParser() {
+
+            @Override
+            public void parse(char[] buffer, int start, int length, DataOutput out) throws HyracksDataException {
+                charArrayAccessor.reset(buffer, start, length);
+                try {
+                    out.writeInt(parseTimePart(charArrayAccessor));
+                } catch (IOException ex) {
+                    throw new HyracksDataException(ex);
+                }
+            }
+        };
+    }
+
+    /**
+     * Parse the given char sequence as a time string, and return the corresponding millisecond-of-day value.
+     * 
+     * @param charAccessor
+     * @return the parsed time as milliseconds
+     * @throws HyracksDataException
+     */
+    public static <T> int parseTimePart(ICharSequenceAccessor<T> charAccessor) throws HyracksDataException {
+
+        int length = charAccessor.getLength();
+        int offset = 0;
+
+        int hour = 0, min = 0, sec = 0, millis = 0;
+        int timezone = 0;
+
+        boolean isExtendedForm = false;
+        if (charAccessor.getCharAt(offset + 2) == ':') {
+            isExtendedForm = true;
+        }
+
+        if (isExtendedForm && (charAccessor.getCharAt(offset + 2) != ':' || charAccessor.getCharAt(offset + 5) != ':')) {
+            throw new HyracksDataException(timeErrorMessage + ": Missing colon in an extended time format.");
+        }
+        // hour
+        for (int i = 0; i < 2; i++) {
+            if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
+                hour = hour * 10 + charAccessor.getCharAt(offset + i) - '0';
+            } else {
+                throw new HyracksDataException(timeErrorMessage + ": Non-numeric value in hour field");
+            }
+        }
+
+        if (hour < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.HOUR.ordinal()]
+                || hour > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.HOUR.ordinal()]) {
+            throw new HyracksDataException(timeErrorMessage + ": hour " + hour);
+        }
+
+        offset += (isExtendedForm) ? 3 : 2;
+
+        // minute
+        for (int i = 0; i < 2; i++) {
+            if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
+                min = min * 10 + charAccessor.getCharAt(offset + i) - '0';
+            } else {
+                throw new HyracksDataException(timeErrorMessage + ": Non-numeric value in minute field");
+            }
+        }
+
+        if (min < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.MINUTE.ordinal()]
+                || min > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.MINUTE.ordinal()]) {
+            throw new HyracksDataException(timeErrorMessage + ": min " + min);
+        }
+
+        offset += (isExtendedForm) ? 3 : 2;
+
+        // second
+        for (int i = 0; i < 2; i++) {
+            if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
+                sec = sec * 10 + charAccessor.getCharAt(offset + i) - '0';
+            } else {
+                throw new HyracksDataException(timeErrorMessage + ": Non-numeric value in second field");
+            }
+        }
+
+        if (sec < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.SECOND.ordinal()]
+                || sec > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.SECOND.ordinal()]) {
+            throw new HyracksDataException(timeErrorMessage + ": sec " + sec);
+        }
+
+        offset += 2;
+
+        if ((isExtendedForm && length > offset && charAccessor.getCharAt(offset) == '.')
+                || (!isExtendedForm && length > offset)) {
+
+            offset += (isExtendedForm) ? 1 : 0;
+            int i = 0;
+            for (; i < 3 && offset + i < length; i++) {
+                if (charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9') {
+                    millis = millis * 10 + charAccessor.getCharAt(offset + i) - '0';
+                } else {
+                    break;
+                }
+            }
+
+            offset += i;
+
+            for (; i < 3; i++) {
+                millis = millis * 10;
+            }
+
+            // error is thrown if more than three digits are seen for the millisecond part
+            if (charAccessor.getLength() > offset && charAccessor.getCharAt(offset) >= '0'
+                    && charAccessor.getCharAt(offset) <= '9') {
+                throw new HyracksDataException(timeErrorMessage + ": too many fields for millisecond.");
+            }
+        }
+
+        if (length > offset) {
+            timezone = parseTimezonePart(charAccessor, offset);
+        }
+
+        return GregorianCalendarSystem.getInstance().getChronon(hour, min, sec, millis, timezone);
+    }
+
+    /**
+     * Parse the timezone part of the given char sequence, starting at the given offset, and return the timezone as a signed count of 15-minute units of offset from UTC.
+     * 
+     * @param charAccessor
+     * @param offset
+     * @return the parsed timezone
+     */
+    public static <T> int parseTimezonePart(ICharSequenceAccessor<T> charAccessor, int offset)
+            throws HyracksDataException {
+        int timezone = 0;
+
+        if (charAccessor.getCharAt(offset) != 'Z') {
+            if ((charAccessor.getCharAt(offset) != '+' && charAccessor.getCharAt(offset) != '-')) {
+                throw new HyracksDataException("Wrong timezone format: missing sign for the time zone offset");
+            }
+
+            short timezoneHour = 0;
+            short timezoneMinute = 0;
+
+            for (int i = 0; i < 2; i++) {
+                if ((charAccessor.getCharAt(offset + 1 + i) >= '0' && charAccessor.getCharAt(offset + 1 + i) <= '9')) {
+                    timezoneHour = (short) (timezoneHour * 10 + charAccessor.getCharAt(offset + 1 + i) - '0');
+                } else {
+                    throw new HyracksDataException(timeErrorMessage + ": Non-numeric value in timezone hour field");
+                }
+            }
+
+            if (timezoneHour < GregorianCalendarSystem.TIMEZONE_HOUR_MIN
+                    || timezoneHour > GregorianCalendarSystem.TIMEZONE_HOUR_MAX) {
+                throw new HyracksDataException(timeErrorMessage + ": time zone hour " + timezoneHour);
+            }
+
+            int temp_offset = (charAccessor.getCharAt(offset + 3) == ':') ? 1 : 0;
+
+            for (int i = 0; i < 2; i++) {
+                if ((charAccessor.getCharAt(offset + temp_offset + 3 + i) >= '0' && charAccessor.getCharAt(offset
+                        + temp_offset + 3 + i) <= '9')) {
+                    timezoneMinute = (short) (timezoneMinute * 10
+                            + charAccessor.getCharAt(offset + temp_offset + 3 + i) - '0');
+                } else {
+                    throw new HyracksDataException(timeErrorMessage + ": Non-numeric value in timezone minute field");
+                }
+            }
+
+            if (timezoneMinute < GregorianCalendarSystem.TIMEZONE_MIN_MIN
+                    || timezoneMinute > GregorianCalendarSystem.TIMEZONE_MIN_MAX) {
+                throw new HyracksDataException(timeErrorMessage + ": time zone minute " + timezoneMinute);
+            }
+
+            if (charAccessor.getCharAt(offset) == '-') {
+                timezone = (byte) -((timezoneHour * 4) + timezoneMinute / 15);
+            } else {
+                timezone = (byte) ((timezoneHour * 4) + timezoneMinute / 15);
+            }
+        }
+        return timezone;
+    }
+
+}
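
The timezone is not stored as minutes but as a signed count of 15-minute units: +08:00 becomes 8 * 4 + 0 = 32 and -05:30 becomes -(5 * 4 + 30 / 15) = -22. A minimal sketch of parsing a time literal with such an offset, under the same assumptions as the earlier sketches:

    import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
    import edu.uci.ics.asterix.om.base.temporal.CharArrayCharSequenceAccessor;

    public class TimeParseSketch {
        public static void main(String[] args) throws Exception {
            // Extended-form time with a -05:30 offset, encoded internally as -22 quarter hours.
            char[] text = "21:15:30.500-05:30".toCharArray();
            CharArrayCharSequenceAccessor accessor = new CharArrayCharSequenceAccessor();
            accessor.reset(text, 0, text.length);
            int chronon = ATimeParserFactory.parseTimePart(accessor);
            System.out.println("time chronon in ms of day, normalized by the timezone: " + chronon);
        }
    }
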
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ByteArrayCharSequenceAccessor.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ByteArrayCharSequenceAccessor.java
index e1a2135..453c86f 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ByteArrayCharSequenceAccessor.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ByteArrayCharSequenceAccessor.java
@@ -14,27 +14,41 @@
  */
 package edu.uci.ics.asterix.om.base.temporal;
 
+import edu.uci.ics.asterix.common.exceptions.AsterixRuntimeException;
+
 public class ByteArrayCharSequenceAccessor implements ICharSequenceAccessor<Byte[]> {
 
-    private byte[] string;
+    private byte[] buf;
     private int offset;
-    private int beginOffset;
+    private int length;
 
     @Override
-    public char getCharAt(int index) {
-        return (char) (string[index + offset + beginOffset]);
+    public char getCharAt(int index) throws AsterixRuntimeException {
+        if (index < 0 || index >= length) {
+            throw new AsterixRuntimeException("Byte array char accessor is out of bound: " + index + ":" + length);
+        }
+        return (char) (buf[index + offset]);
     }
 
-    /* The offset is the position of the first letter in the byte array */
-    public void reset(byte[] obj, int beginOffset, int offset) {
-        string = obj;
+    /**
+     * Reset the wrapped byte array.
+     * 
+     * @param obj
+     *            The byte array to be wrapped.
+     * @param offset
+     *            The offset of the first character of the wrapped string within the byte array.
+     * @param length
+     *            The number of characters in the wrapped string.
+     */
+    public void reset(byte[] obj, int offset, int length) {
+        this.buf = obj;
         this.offset = offset;
-        this.beginOffset = beginOffset;
+        this.length = length;
     }
 
     @Override
     public int getLength() {
-        return ((string[beginOffset - 2] & 0xff) << 8) + ((string[beginOffset - 1] & 0xff) << 0) - offset;
+        return length;
     }
 
 }
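
The accessor no longer reads the two-byte UTF-8 length header itself; callers now pass the offset and length explicitly. A hedged sketch of the new calling convention (the byte layout mirrors what the old getLength() implementation assumed; the sketch class name is illustrative):

    import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;

    public class ByteAccessorSketch {
        public static void main(String[] args) throws Exception {
            // Two-byte length header followed by the ASCII characters "2012-07-15".
            byte[] serialized = { 0, 10, '2', '0', '1', '2', '-', '0', '7', '-', '1', '5' };
            int length = ((serialized[0] & 0xff) << 8) + (serialized[1] & 0xff);
            ByteArrayCharSequenceAccessor accessor = new ByteArrayCharSequenceAccessor();
            accessor.reset(serialized, 2, length); // skip the header, wrap 10 characters
            System.out.println(accessor.getCharAt(0)); // '2'
            System.out.println(accessor.getLength());  // 10
        }
    }
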
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/CharArrayCharSequenceAccessor.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/CharArrayCharSequenceAccessor.java
new file mode 100644
index 0000000..404f0ee
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/CharArrayCharSequenceAccessor.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.base.temporal;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixRuntimeException;
+
+public class CharArrayCharSequenceAccessor implements ICharSequenceAccessor<char[]> {
+
+    private char[] buf;
+    private int offset;
+    private int length;
+
+    @Override
+    public char getCharAt(int index) throws AsterixRuntimeException {
+        if (index < 0 || index >= length) {
+            throw new AsterixRuntimeException("Char array char accessor is out of bound: " + index + ":" + length);
+        }
+        return (char) (buf[index + offset]);
+    }
+
+    /**
+     * Reset the wrapped char array.
+     * 
+     * @param obj
+     *            The char array to be wrapped.
+     * @param offset
+     *            The offset of the first character of the wrapped string within the char array.
+     * @param length
+     *            The number of characters in the wrapped string.
+     */
+    public void reset(char[] obj, int offset, int length) {
+        this.buf = obj;
+        this.offset = offset;
+        this.length = length;
+    }
+
+    @Override
+    public int getLength() {
+        return length;
+    }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/DurationArithmeticOperations.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/DurationArithmeticOperations.java
new file mode 100644
index 0000000..9d6bc2f
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/DurationArithmeticOperations.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.base.temporal;
+
+/**
+ * Algorithms for duration related arithmetic operations.
+ */
+public class DurationArithmeticOperations {
+
+    private final static GregorianCalendarSystem calSystem = GregorianCalendarSystem.getInstance();
+
+    /**
+     * Add a duration (with yearMonth and dayTime) onto a time point. The algorithm works as described in
+     * <a
+     * href="http://www.w3.org/TR/xmlschema-2/#adding-durations-to-dateTimes">"XML: adding durations to dateTimes"</a>.
+     * <p/>
+     * The basic algorithm is as follows: the duration is applied to the time point as two separate fields, a
+     * year-month field and a day-time field. The year-month field is applied first, clamping the day to the valid
+     * range of the resulting month (for example, adding 1M to 03-31 yields 04-30). Then the day-time field is applied.
+     * <p/>
+     * 
+     * @param pointChronon
+     * @param yearMonthDuration
+     * @param dayTimeDuration
+     * @return
+     */
+    public static long addDuration(long pointChronon, int yearMonthDuration, long dayTimeDuration) {
+
+        int year = calSystem.getYear(pointChronon);
+        int month = calSystem.getMonthOfYear(pointChronon, year);
+        int day = calSystem.getDayOfMonthYear(pointChronon, year, month);
+        int hour = calSystem.getHourOfDay(pointChronon);
+        int min = calSystem.getMinOfHour(pointChronon);
+        int sec = calSystem.getSecOfMin(pointChronon);
+        int ms = calSystem.getMillisOfSec(pointChronon);
+
+        // Apply the year-month duration
+        int carry = yearMonthDuration / 12;
+        month += (yearMonthDuration % 12);
+
+        if (month < 0) {
+            month += 12;
+            carry -= 1;
+        } else if (month > 12) {
+            month -= 12;
+            carry += 1;
+        }
+
+        year += carry;
+
+        boolean isLeapYear = calSystem.isLeapYear(year);
+
+        if (isLeapYear) {
+            if (day > GregorianCalendarSystem.DAYS_OF_MONTH_LEAP[month - 1]) {
+                day = GregorianCalendarSystem.DAYS_OF_MONTH_LEAP[month - 1];
+            }
+        } else {
+            if (day > GregorianCalendarSystem.DAYS_OF_MONTH_ORDI[month - 1]) {
+                day = GregorianCalendarSystem.DAYS_OF_MONTH_ORDI[month - 1];
+            }
+        }
+
+        return calSystem.getChronon(year, month, day, hour, min, sec, ms, 0) + dayTimeDuration;
+    }
+
+    public static int addDuration(int pointChronon, long dayTimeDuration) {
+        int rtnChronon = (int) ((pointChronon + dayTimeDuration) % GregorianCalendarSystem.CHRONON_OF_DAY);
+        if (rtnChronon < 0) {
+            rtnChronon += GregorianCalendarSystem.CHRONON_OF_DAY;
+        }
+
+        return rtnChronon;
+    }
+
+}
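
A worked sketch of the clamping behaviour described in the Javadoc above, assuming GregorianCalendarSystem.getChronon behaves as it is used in this file; the dates and the sketch class name are illustrative:

    import edu.uci.ics.asterix.om.base.temporal.DurationArithmeticOperations;
    import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;

    public class AddDurationSketch {
        public static void main(String[] args) {
            GregorianCalendarSystem cal = GregorianCalendarSystem.getInstance();
            long march31 = cal.getChronon(2012, 3, 31, 0, 0, 0, 0, 0); // 2012-03-31T00:00:00.000Z
            // Adding a 1-month year-month duration clamps the day to April's length,
            // so the result should equal 2012-04-30T00:00:00.000Z.
            long shifted = DurationArithmeticOperations.addDuration(march31, 1, 0);
            System.out.println(shifted == cal.getChronon(2012, 4, 30, 0, 0, 0, 0, 0)); // true
        }
    }
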
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/GregorianCalendarSystem.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/GregorianCalendarSystem.java
index 149a1d2..d43f235 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/GregorianCalendarSystem.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/GregorianCalendarSystem.java
@@ -57,6 +57,7 @@
     public static final int CHRONON_OF_MINUTE = 60 * CHRONON_OF_SECOND;
     public static final int CHRONON_OF_HOUR = 60 * CHRONON_OF_MINUTE;
     public static final long CHRONON_OF_DAY = 24 * CHRONON_OF_HOUR;
+    public static final int MONTHS_IN_A_YEAR = 12;
 
     /**
      * Minimum feasible value of each field
@@ -238,8 +239,25 @@
         return chrononTime;
     }
 
+    public long adjustChrononByTimezone(long chronon, int timezone) {
+        return chronon + timezone / 4 * CHRONON_OF_HOUR + (timezone % 4) * 15 * CHRONON_OF_MINUTE;
+    }
+
+    public static int getChrononInDays(long chronon) {
+        if (chronon >= 0) {
+            return (int) (chronon / CHRONON_OF_DAY);
+        } else {
+            if (chronon % CHRONON_OF_DAY != 0) {
+                return (int) (chronon / CHRONON_OF_DAY - 1);
+            } else {
+                return (int) (chronon / CHRONON_OF_DAY);
+            }
+        }
+    }
+
     /**
-     * Get the extended string representation of the given UTC chronon time under the given time zone. Only fields before
+     * Get the extended string representation of the given UTC chronon time under the given time zone. Only fields
+     * before
      * the given field index will be returned.
      * <p/>
      * The extended string representation is like:<br/>
@@ -258,6 +276,7 @@
 
         switch (startField) {
             case YEAR:
+            default:
                 sbder.append(String.format(year < 0 ? "%05d" : "%04d", year));
                 if (untilField == Fields.YEAR) {
                     return;
@@ -304,10 +323,13 @@
                 break;
         }
 
-        if (untilField.compareTo(Fields.DAY) > 0) {
+        if (timezone == 0) {
             sbder.append("Z");
         } else {
             short tzMin = (short) ((timezone % 4) * 15);
+            if (tzMin < 0) {
+                tzMin = (short) (-1 * tzMin);
+            }
             short tzHr = (short) (timezone / 4);
             sbder.append((tzHr >= 0 ? "+" : "-")).append(String.format("%02d", (tzHr < 0 ? -tzHr : tzHr))).append(":")
                     .append(String.format("%02d", tzMin));
@@ -328,6 +350,7 @@
 
         switch (startField) {
             case YEAR:
+            default:
                 sbder.append(String.format(year < 0 ? "%05d" : "%04d", year));
                 if (untilField == Fields.YEAR) {
                     return;
@@ -359,10 +382,13 @@
                 break;
         }
 
-        if (untilField.compareTo(Fields.DAY) > 0) {
+        if (timezone == 0) {
             sbder.append("Z");
         } else {
             short tzMin = (short) ((timezone % 4) * 15);
+            if (tzMin < 0) {
+                tzMin = (short) (-1 * tzMin);
+            }
             short tzHr = (short) (timezone / 4);
             sbder.append((tzHr >= 0 ? "+" : "-")).append(String.format("%02d", (tzHr < 0 ? -tzHr : tzHr)))
                     .append(String.format("%02d", tzMin));
@@ -422,7 +448,7 @@
      * @param year
      * @return
      */
-    protected boolean isLeapYear(int year) {
+    public boolean isLeapYear(int year) {
         return ((year & 3) == 0) && ((year % 100) != 0 || (year % 400) == 0);
     }
 
@@ -454,7 +480,8 @@
      * Get the year for the given chronon time.
      * <p/>
      * This code is directly from the Joda library BadicChronology.java.<br/>
-     * The original authers are Stephen Colebourne, Brain S O'Neill and Guy Allard, and modified by JArod Wen on May 7th, 2012.
+     * The original authors are Stephen Colebourne, Brian S O'Neill and Guy Allard; modified by JArod Wen on May
+     * 7th, 2012.
      * 
      * @param chrononTime
      * @return
@@ -501,7 +528,8 @@
      * Get the month of the year for the given chronon time and the year.
      * <p/>
      * This code is directly from the Joda library BasicGJChronology.java.<br/>
-     * The original authers are Stephen Colebourne, Brain S O'Neill and Guy Allard, and modified by JArod Wen on May 7th, 2012.
+     * The original authors are Stephen Colebourne, Brian S O'Neill and Guy Allard; modified by JArod Wen on May
+     * 7th, 2012, and commented by Theodoros Ioannou in July 2012.
      * <p/>
      * 
      * @param millis
@@ -565,7 +593,8 @@
      * Get the day of the given month and year for the input chronon time.
      * <p/>
      * This function is directly from Joda Library BasicChronology.java.<br/>
-     * The original authers are Stephen Colebourne, Brain S O'Neill and Guy Allard, and modified by JArod Wen on May 7th, 2012.
+     * The original authors are Stephen Colebourne, Brian S O'Neill and Guy Allard; modified by JArod Wen on May
+     * 7th, 2012.
      * <p/>
      * 
      * @param millis
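
For the two helpers added above, a small sketch of the expected arithmetic, assuming the chronon constants keep the values defined earlier in this file (a timezone of 32 means +08:00 expressed in quarter-hour units; the sketch class name is illustrative):

    import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;

    public class ChrononSketch {
        public static void main(String[] args) {
            GregorianCalendarSystem cal = GregorianCalendarSystem.getInstance();
            long epoch = 0L; // 1970-01-01T00:00:00.000Z
            // adjustChrononByTimezone shifts by timezone / 4 hours plus (timezone % 4) * 15 minutes.
            long shifted = cal.adjustChrononByTimezone(epoch, 32);
            System.out.println(shifted == 8L * GregorianCalendarSystem.CHRONON_OF_HOUR); // true
            // getChrononInDays rounds towards negative infinity for pre-epoch chronons.
            System.out.println(GregorianCalendarSystem.getChrononInDays(-1L)); // -1
        }
    }
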
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ICharSequenceAccessor.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ICharSequenceAccessor.java
index 6b4e898..d5a99a0 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ICharSequenceAccessor.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ICharSequenceAccessor.java
@@ -14,10 +14,23 @@
  */
 package edu.uci.ics.asterix.om.base.temporal;
 
+import edu.uci.ics.asterix.common.exceptions.AsterixRuntimeException;
+
 public interface ICharSequenceAccessor<T> {
 
-    public char getCharAt(int index);
+    /**
+     * Return the character in the wrapped char sequence at the given index.
+     * 
+     * @param index
+     * @return
+     */
+    public char getCharAt(int index) throws AsterixRuntimeException;
 
+    /**
+     * Get the length of the wrapped char sequence.
+     * 
+     * @return
+     */
     public int getLength();
 
 }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/StringCharSequenceAccessor.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/StringCharSequenceAccessor.java
index 6c02340..17e483a 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/StringCharSequenceAccessor.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/StringCharSequenceAccessor.java
@@ -14,24 +14,31 @@
  */
 package edu.uci.ics.asterix.om.base.temporal;
 
+import edu.uci.ics.asterix.common.exceptions.AsterixRuntimeException;
+
 public class StringCharSequenceAccessor implements ICharSequenceAccessor<String> {
 
     private String string;
     private int offset;
+    private int length;
 
     @Override
-    public char getCharAt(int index) {
+    public char getCharAt(int index) throws AsterixRuntimeException {
+        if (index < 0 || index >= length) {
+            throw new AsterixRuntimeException("String accessor is out of bound: " + index + ":" + length);
+        }
         return string.charAt(index + offset);
     }
 
-    public void reset(String obj, int offset) {
-        string = obj;
+    public void reset(String obj, int offset, int len) {
+        this.string = obj;
         this.offset = offset;
+        this.length = len;
     }
 
     @Override
     public int getLength() {
-        return string.length() - offset;
+        return length;
     }
 
 }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
index 2d4419f..a547737 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
@@ -12,6 +12,8 @@
 import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.ABooleanTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.ACircleTypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.impl.ADateTimeTypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.impl.ADateTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.ADoubleTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.AFloatTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.AInt32TypeComputer;
@@ -21,6 +23,7 @@
 import edu.uci.ics.asterix.om.typecomputer.impl.APolygonTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.ARectangleTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.AStringTypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.impl.ATimeTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.BinaryBooleanOrNullFunctionTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.BinaryStringBoolOrNullTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.BinaryStringStringOrNullTypeComputer;
@@ -40,6 +43,7 @@
 import edu.uci.ics.asterix.om.typecomputer.impl.NonTaggedUnaryMinusTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.NotNullTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.OpenRecordConstructorResultType;
+import edu.uci.ics.asterix.om.typecomputer.impl.OptionalABooleanTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.OptionalACircleTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.OptionalADateTimeTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.OptionalADateTypeComputer;
@@ -50,6 +54,7 @@
 import edu.uci.ics.asterix.om.typecomputer.impl.OptionalAInt32TypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.OptionalAInt64TypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.OptionalAInt8TypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.impl.OptionalAIntervalTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.OptionalALineTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.OptionalAPoint3DTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.OptionalAPointTypeComputer;
@@ -274,8 +279,6 @@
     public final static FunctionIdentifier SERIAL_LOCAL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
             "local-avg-serial", 1);
 
-    public final static FunctionIdentifier YEAR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "year", 1);
-
     public final static FunctionIdentifier SCAN_COLLECTION = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
             "scan-collection", 1);
     public final static FunctionIdentifier SUBSET_COLLECTION = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
@@ -367,6 +370,49 @@
             "datetime", 1);
     public final static FunctionIdentifier DURATION_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
             "duration", 1);
+    public final static FunctionIdentifier INTERVAL_CONSTRUCTOR_DATE = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "interval-from-date", 2);
+    public final static FunctionIdentifier INTERVAL_CONSTRUCTOR_TIME = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "interval-from-time", 2);
+    public final static FunctionIdentifier INTERVAL_CONSTRUCTOR_DATETIME = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "interval-from-datetime", 2);
+    public final static FunctionIdentifier INTERVAL_CONSTRUCTOR_START_FROM_DATE = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "interval-start-from-date", 2);
+    public final static FunctionIdentifier INTERVAL_CONSTRUCTOR_START_FROM_TIME = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "interval-start-from-time", 2);
+    public final static FunctionIdentifier INTERVAL_CONSTRUCTOR_START_FROM_DATETIME = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "interval-start-from-datetime", 2);
+    public final static FunctionIdentifier INTERVAL_BEFORE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-before", 2);
+    public final static FunctionIdentifier INTERVAL_AFTER = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-after", 2);
+    public final static FunctionIdentifier INTERVAL_MEETS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-meets", 2);
+    public final static FunctionIdentifier INTERVAL_MET_BY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-met-by", 2);
+    public final static FunctionIdentifier INTERVAL_OVERLAPS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-overlaps", 2);
+    public final static FunctionIdentifier INTERVAL_OVERLAPPED_BY = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "interval-overlapped-by", 2);
+    public final static FunctionIdentifier OVERLAP = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "overlap", 2);
+    public final static FunctionIdentifier INTERVAL_STARTS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-starts", 2);
+    public final static FunctionIdentifier INTERVAL_STARTED_BY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-started-by", 2);
+    public final static FunctionIdentifier INTERVAL_COVERS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-covers", 2);
+    public final static FunctionIdentifier INTERVAL_COVERED_BY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-covered-by", 2);
+    public final static FunctionIdentifier INTERVAL_ENDS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-ends", 2);
+    public final static FunctionIdentifier INTERVAL_ENDED_BY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-ended-by", 2);
+    public final static FunctionIdentifier CURRENT_TIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "current-time", 0);
+    public final static FunctionIdentifier CURRENT_DATE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "current-date", 0);
+    public final static FunctionIdentifier CURRENT_DATETIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "current-datetime", 0);
 
     // spatial
     public final static FunctionIdentifier CREATE_POINT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
@@ -398,17 +444,62 @@
     public final static FunctionIdentifier CAST_RECORD = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
             "cast-record", 1);
 
-    public final static FunctionIdentifier GET_POINT_X_COORDINATE_ACCESSOR = new FunctionIdentifier(
-            FunctionConstants.ASTERIX_NS, "get-x", 1);
-    public final static FunctionIdentifier GET_POINT_Y_COORDINATE_ACCESSOR = new FunctionIdentifier(
-            FunctionConstants.ASTERIX_NS, "get-y", 1);
-    public final static FunctionIdentifier GET_CIRCLE_RADIUS_ACCESSOR = new FunctionIdentifier(
-            FunctionConstants.ASTERIX_NS, "get-radius", 1);
-    public final static FunctionIdentifier GET_CIRCLE_CENTER_ACCESSOR = new FunctionIdentifier(
-            FunctionConstants.ASTERIX_NS, "get-center", 1);
-    public final static FunctionIdentifier GET_POINTS_LINE_RECTANGLE_POLYGON_ACCESSOR = new FunctionIdentifier(
-            FunctionConstants.ASTERIX_NS, "get-points", 1);
+    // Spatial and temporal type accessors
+    public static final FunctionIdentifier ACCESSOR_TEMPORAL_YEAR = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "year", 1);
+    public static final FunctionIdentifier ACCESSOR_TEMPORAL_MONTH = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "month", 1);
+    public static final FunctionIdentifier ACCESSOR_TEMPORAL_DAY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "day", 1);
+    public static final FunctionIdentifier ACCESSOR_TEMPORAL_HOUR = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "hour", 1);
+    public static final FunctionIdentifier ACCESSOR_TEMPORAL_MIN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "minute", 1);
+    public static final FunctionIdentifier ACCESSOR_TEMPORAL_SEC = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "second", 1);
+    public static final FunctionIdentifier ACCESSOR_TEMPORAL_MILLISEC = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "millisecond", 1);
 
+    // Temporal functions
+    public static final FunctionIdentifier DATE_FROM_UNIX_TIME_IN_DAYS = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "date-from-unix-time-in-days", 1);
+    public static final FunctionIdentifier DATE_FROM_DATETIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "date-from-datetime", 1);
+    public final static FunctionIdentifier ADD_DATE_DURATION = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "add-date-duration", 2);
+    public final static FunctionIdentifier SUBTRACT_DATE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "subtract-date", 2);
+    public final static FunctionIdentifier TIME_FROM_UNIX_TIME_IN_MS = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "time-from-unix-time-in-ms", 1);
+    public final static FunctionIdentifier TIME_FROM_DATETIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "time-from-datetime", 1);
+    public final static FunctionIdentifier SUBTRACT_TIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "subtract-time", 2);
+    public final static FunctionIdentifier ADD_TIME_DURATION = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "add-time-duration", 2);
+    public final static FunctionIdentifier DATETIME_FROM_UNIX_TIME_IN_MS = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "datetime-from-unix-time-in-ms", 1);
+    public final static FunctionIdentifier DATETIME_FROM_DATE_TIME = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "datetime-from-date-time", 2);
+    public final static FunctionIdentifier SUBTRACT_DATETIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "subtract-datetime", 2);
+    public final static FunctionIdentifier ADD_DATETIME_DURATION = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "add-datetime-duration", 2);
+    public final static FunctionIdentifier CALENDAR_DURATION_FROM_DATETIME = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "calendar-duration-from-datetime", 2);
+    public final static FunctionIdentifier CALENDAR_DURATION_FROM_DATE = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "calendar-duration-from-date", 2);
+    public final static FunctionIdentifier ADJUST_TIME_FOR_TIMEZONE = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "adjust-time-for-timezone", 2);
+    public final static FunctionIdentifier ADJUST_DATETIME_FOR_TIMEZONE = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "adjust-datetime-for-timezone", 2);
+
+    public final static FunctionIdentifier GET_POINT_X_COORDINATE_ACCESSOR = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "get-x", 1);
+    public final static FunctionIdentifier GET_POINT_Y_COORDINATE_ACCESSOR = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "get-y", 1);
+    public final static FunctionIdentifier GET_CIRCLE_RADIUS_ACCESSOR = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "get-radius", 1);
+    public final static FunctionIdentifier GET_CIRCLE_CENTER_ACCESSOR = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "get-center", 1);
+    public final static FunctionIdentifier GET_POINTS_LINE_RECTANGLE_POLYGON_ACCESSOR = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "get-points", 1);
+
     public static final FunctionIdentifier EQ = AlgebricksBuiltinFunctions.EQ;
     public static final FunctionIdentifier LE = AlgebricksBuiltinFunctions.LE;
     public static final FunctionIdentifier GE = AlgebricksBuiltinFunctions.GE;
@@ -661,7 +752,6 @@
         add(TIME_CONSTRUCTOR, OptionalATimeTypeComputer.INSTANCE);
         add(TYPE_OF, null); // TODO
         add(UNORDERED_LIST_CONSTRUCTOR, UnorderedListConstructorResultType.INSTANCE);
-        add(YEAR, OptionalAInt32TypeComputer.INSTANCE);
         add(WORD_TOKENS, new IResultTypeComputer() {
 
             @Override
@@ -671,12 +761,64 @@
             }
         });
 
+        // temporal type accessors
+        add(ACCESSOR_TEMPORAL_YEAR, OptionalAInt32TypeComputer.INSTANCE);
+        add(ACCESSOR_TEMPORAL_MONTH, OptionalAInt32TypeComputer.INSTANCE);
+        add(ACCESSOR_TEMPORAL_DAY, OptionalAInt32TypeComputer.INSTANCE);
+        add(ACCESSOR_TEMPORAL_HOUR, OptionalAInt32TypeComputer.INSTANCE);
+        add(ACCESSOR_TEMPORAL_MIN, OptionalAInt32TypeComputer.INSTANCE);
+        add(ACCESSOR_TEMPORAL_SEC, OptionalAInt32TypeComputer.INSTANCE);
+        add(ACCESSOR_TEMPORAL_MILLISEC, OptionalAInt32TypeComputer.INSTANCE);
+
+        // temporal functions
+        add(DATE_FROM_UNIX_TIME_IN_DAYS, OptionalADateTypeComputer.INSTANCE);
+        add(DATE_FROM_DATETIME, OptionalADateTypeComputer.INSTANCE);
+        add(ADD_DATE_DURATION, OptionalADateTypeComputer.INSTANCE);
+        add(SUBTRACT_DATE, OptionalADurationTypeComputer.INSTANCE);
+        add(TIME_FROM_UNIX_TIME_IN_MS, OptionalATimeTypeComputer.INSTANCE);
+        add(TIME_FROM_DATETIME, OptionalATimeTypeComputer.INSTANCE);
+        add(SUBTRACT_TIME, OptionalADurationTypeComputer.INSTANCE);
+        add(ADD_TIME_DURATION, OptionalATimeTypeComputer.INSTANCE);
+        add(DATETIME_FROM_DATE_TIME, OptionalADateTimeTypeComputer.INSTANCE);
+        add(DATETIME_FROM_UNIX_TIME_IN_MS, OptionalADateTimeTypeComputer.INSTANCE);
+        add(SUBTRACT_DATETIME, OptionalADurationTypeComputer.INSTANCE);
+        add(ADD_DATETIME_DURATION, OptionalADateTimeTypeComputer.INSTANCE);
+        add(CALENDAR_DURATION_FROM_DATETIME, OptionalADurationTypeComputer.INSTANCE);
+        add(CALENDAR_DURATION_FROM_DATE, OptionalADurationTypeComputer.INSTANCE);
+        add(ADJUST_DATETIME_FOR_TIMEZONE, OptionalAStringTypeComputer.INSTANCE);
+        add(ADJUST_TIME_FOR_TIMEZONE, OptionalAStringTypeComputer.INSTANCE);
+        add(INTERVAL_BEFORE, OptionalABooleanTypeComputer.INSTANCE);
+        add(INTERVAL_AFTER, OptionalABooleanTypeComputer.INSTANCE);
+        add(INTERVAL_MEETS, OptionalABooleanTypeComputer.INSTANCE);
+        add(INTERVAL_MET_BY, OptionalABooleanTypeComputer.INSTANCE);
+        add(INTERVAL_OVERLAPS, OptionalABooleanTypeComputer.INSTANCE);
+        add(INTERVAL_OVERLAPPED_BY, OptionalABooleanTypeComputer.INSTANCE);
+        add(OVERLAP, OptionalABooleanTypeComputer.INSTANCE);
+        add(INTERVAL_STARTS, OptionalABooleanTypeComputer.INSTANCE);
+        add(INTERVAL_STARTED_BY, OptionalABooleanTypeComputer.INSTANCE);
+        add(INTERVAL_COVERS, OptionalABooleanTypeComputer.INSTANCE);
+        add(INTERVAL_COVERED_BY, OptionalABooleanTypeComputer.INSTANCE);
+        add(INTERVAL_ENDS, OptionalABooleanTypeComputer.INSTANCE);
+        add(INTERVAL_ENDED_BY, OptionalABooleanTypeComputer.INSTANCE);
+        add(CURRENT_DATE, ADateTypeComputer.INSTANCE);
+        add(CURRENT_TIME, ATimeTypeComputer.INSTANCE);
+        add(CURRENT_DATETIME, ADateTimeTypeComputer.INSTANCE);
+
+        // interval constructors
+        add(INTERVAL_CONSTRUCTOR_DATE, OptionalAIntervalTypeComputer.INSTANCE);
+        add(INTERVAL_CONSTRUCTOR_TIME, OptionalAIntervalTypeComputer.INSTANCE);
+        add(INTERVAL_CONSTRUCTOR_DATETIME, OptionalAIntervalTypeComputer.INSTANCE);
+        add(INTERVAL_CONSTRUCTOR_START_FROM_DATE, OptionalAIntervalTypeComputer.INSTANCE);
+        add(INTERVAL_CONSTRUCTOR_START_FROM_DATETIME, OptionalAIntervalTypeComputer.INSTANCE);
+        add(INTERVAL_CONSTRUCTOR_START_FROM_TIME, OptionalAIntervalTypeComputer.INSTANCE);
+
         String metadataFunctionLoaderClassName = "edu.uci.ics.asterix.metadata.functions.MetadataBuiltinFunctions";
         try {
             Class.forName(metadataFunctionLoaderClassName);
         } catch (ClassNotFoundException e) {
             throw new RuntimeException(e);
         }
+
     }
 
     static {
@@ -853,7 +995,7 @@
         funTypeComputer.put(functionInfo, typeComputer);
         finfoRepo.put(fi);
     }
-
+    
     private static IFunctionInfo addPrivateFunction(FunctionIdentifier fi, IResultTypeComputer typeComputer) {
         IFunctionInfo functionInfo = getAsterixFunctionInfo(fi);
         builtinFunctionsSet.put(functionInfo, functionInfo);
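
The interval-* identifiers registered above follow Allen's interval relations. The evaluators themselves live elsewhere in the codebase; as a hedged illustration of what, for instance, interval-before is expected to test over two [start, end] chronon pairs, with a hypothetical helper class and method:

    public final class IntervalRelationSketch {
        // Allen's "before": the first interval ends strictly before the second one starts.
        static boolean intervalBefore(long startA, long endA, long startB, long endB) {
            return endA < startB;
        }

        public static void main(String[] args) {
            System.out.println(intervalBefore(0L, 10L, 20L, 30L)); // true
            System.out.println(intervalBefore(0L, 25L, 20L, 30L)); // false
        }
    }
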
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/base/DefaultOpenFieldType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/base/DefaultOpenFieldType.java
index 9184616..45ae5c5 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/base/DefaultOpenFieldType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/base/DefaultOpenFieldType.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -15,6 +15,8 @@
 
 package edu.uci.ics.asterix.om.pointables.base;
 
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.exceptions.AsterixRuntimeException;
 import edu.uci.ics.asterix.om.types.AOrderedListType;
 import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.om.types.ATypeTag;
@@ -27,13 +29,19 @@
  * fields in the open part, e.g., a "record" (nested) field in the open part is
  * always a fully open one, and a "list" field in the open part is always a list
  * of "ANY".
- * 
  */
 public class DefaultOpenFieldType {
 
     // nested open field rec type
-    public static ARecordType NESTED_OPEN_RECORD_TYPE = new ARecordType("nested-open", new String[] {},
-            new IAType[] {}, true);
+    public static ARecordType NESTED_OPEN_RECORD_TYPE;
+
+    static {
+        try {
+            NESTED_OPEN_RECORD_TYPE = new ARecordType("nested-open", new String[] {}, new IAType[] {}, true);
+        } catch (AsterixException e) {
+            throw new AsterixRuntimeException();
+        }
+    }
 
     // nested open list type
     public static AOrderedListType NESTED_OPEN_AORDERED_LIST_TYPE = new AOrderedListType(BuiltinType.ANY,
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ADateTimeTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ADateTimeTypeComputer.java
new file mode 100644
index 0000000..c7a51da
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ADateTimeTypeComputer.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.typecomputer.impl;
+
+import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+public class ADateTimeTypeComputer implements IResultTypeComputer {
+
+    public static final ADateTimeTypeComputer INSTANCE = new ADateTimeTypeComputer();
+
+    private ADateTimeTypeComputer() {
+    }
+
+    @Override
+    public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+            IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+        return BuiltinType.ADATETIME;
+    }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ATimeTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ATimeTypeComputer.java
new file mode 100644
index 0000000..55e1bc4
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ATimeTypeComputer.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.typecomputer.impl;
+
+import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+public class ATimeTypeComputer implements IResultTypeComputer {
+
+    public static final ATimeTypeComputer INSTANCE = new ATimeTypeComputer();
+
+    private ATimeTypeComputer() {
+    }
+
+    @Override
+    public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+            IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+        return BuiltinType.ATIME;
+    }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ClosedRecordConstructorResultType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ClosedRecordConstructorResultType.java
index daf7164..8ed2084 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ClosedRecordConstructorResultType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ClosedRecordConstructorResultType.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -19,6 +19,7 @@
 
 import org.apache.commons.lang3.mutable.Mutable;
 
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.om.base.AString;
 import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
 import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
@@ -49,7 +50,7 @@
         ARecordType type = (ARecordType) TypeComputerUtilities.getRequiredType(f);
         if (type != null)
             return type;
-        
+
         int n = f.getArguments().size() / 2;
         String[] fieldNames = new String[n];
         IAType[] fieldTypes = new IAType[n];
@@ -68,6 +69,10 @@
             fieldTypes[i] = (IAType) env.getType(e2);
             i++;
         }
-        return new ARecordType(null, fieldNames, fieldTypes, false);
+        try {
+            return new ARecordType(null, fieldNames, fieldTypes, false);
+        } catch (AsterixException e) {
+            throw new AlgebricksException(e);
+        }
     }
 }
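
Because the ARecordType constructor now throws AsterixException (duplicate closed-field names, or I/O errors while serializing the names), every type computer that builds a record type wraps the construction and rethrows as AlgebricksException. A minimal, self-contained sketch of that pattern follows; the class name, field names, and types are illustrative only, not taken from the patch.

// Sketch only: shows the construct-and-wrap pattern the type computers now use.
import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

public class RecordTypeSketch {
    public static ARecordType closedPersonType() throws AlgebricksException {
        String[] names = { "name", "age" };
        IAType[] types = { BuiltinType.ASTRING, BuiltinType.AINT32 };
        try {
            // throws AsterixException if two closed fields share a name
            return new ARecordType("Person", names, types, false);
        } catch (AsterixException e) {
            throw new AlgebricksException(e);
        }
    }
}
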
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedLocalAvgTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedLocalAvgTypeComputer.java
index 8b54197..ee52425 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedLocalAvgTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedLocalAvgTypeComputer.java
@@ -1,8 +1,24 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.asterix.om.typecomputer.impl;
 
 import java.util.ArrayList;
 import java.util.List;
 
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
 import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.om.types.AUnionType;
@@ -23,7 +39,11 @@
         List<IAType> unionList = new ArrayList<IAType>();
         unionList.add(BuiltinType.ANULL);
         unionList.add(BuiltinType.ADOUBLE);
-        return new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
-                new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT32 }, false);
+        try {
+            return new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
+                    new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT32 }, false);
+        } catch (AsterixException e) {
+            throw new AlgebricksException(e);
+        }
     }
 }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OpenRecordConstructorResultType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OpenRecordConstructorResultType.java
index c46c59b..0c6fc55 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OpenRecordConstructorResultType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OpenRecordConstructorResultType.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -21,6 +21,7 @@
 
 import org.apache.commons.lang3.mutable.Mutable;
 
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.om.base.AString;
 import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
 import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
@@ -72,6 +73,10 @@
         IAType[] fieldTypes = new IAType[n];
         fieldNames = namesList.toArray(fieldNames);
         fieldTypes = typesList.toArray(fieldTypes);
-        return new ARecordType(null, fieldNames, fieldTypes, true);
+        try {
+            return new ARecordType(null, fieldNames, fieldTypes, true);
+        } catch (AsterixException e) {
+            throw new AlgebricksException(e);
+        }
     }
 }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OptionalABooleanTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OptionalABooleanTypeComputer.java
new file mode 100644
index 0000000..abeea2a
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OptionalABooleanTypeComputer.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.typecomputer.impl;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+public class OptionalABooleanTypeComputer implements IResultTypeComputer {
+
+    public static final OptionalABooleanTypeComputer INSTANCE = new OptionalABooleanTypeComputer();
+
+    private OptionalABooleanTypeComputer() {
+    }
+
+    @Override
+    public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+            IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+        List<IAType> unionList = new ArrayList<IAType>();
+        unionList.add(BuiltinType.ANULL);
+        unionList.add(BuiltinType.ABOOLEAN);
+        return new AUnionType(unionList, "OptionalBoolean");
+    }
+
+}
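
The new Optional* type computers all share one shape: the result type is a union of ANULL and the base type. A hedged sketch of a generic helper that would produce the same union for any base type is shown below; the helper class and method names are hypothetical and do not exist in the patch.

// Hypothetical helper: builds the NULL-or-T union that
// OptionalABooleanTypeComputer and OptionalAIntervalTypeComputer return.
import java.util.ArrayList;
import java.util.List;

import edu.uci.ics.asterix.om.types.AUnionType;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;

public final class OptionalTypes {
    private OptionalTypes() {
    }

    public static AUnionType optionalOf(IAType baseType, String unionName) {
        List<IAType> unionList = new ArrayList<IAType>();
        unionList.add(BuiltinType.ANULL); // NULL is always the first member of these unions
        unionList.add(baseType);
        return new AUnionType(unionList, unionName);
    }
}

// e.g. OptionalTypes.optionalOf(BuiltinType.ABOOLEAN, "OptionalBoolean")
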
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OptionalAIntervalTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OptionalAIntervalTypeComputer.java
new file mode 100644
index 0000000..bb9f993
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OptionalAIntervalTypeComputer.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.typecomputer.impl;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+public class OptionalAIntervalTypeComputer implements IResultTypeComputer {
+
+    public static final OptionalAIntervalTypeComputer INSTANCE = new OptionalAIntervalTypeComputer();
+
+    private OptionalAIntervalTypeComputer() {
+
+    }
+
+    public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+            IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+        List<IAType> unionList = new ArrayList<IAType>();
+        unionList.add(BuiltinType.ANULL);
+        unionList.add(BuiltinType.AINTERVAL);
+        return new AUnionType(unionList, "OptionalInterval");
+    }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/RecordConstructorResultType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/RecordConstructorResultType.java
index d20f43b..1f072f0 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/RecordConstructorResultType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/RecordConstructorResultType.java
@@ -1,9 +1,25 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.asterix.om.typecomputer.impl;
 
 import java.util.Iterator;
 
 import org.apache.commons.lang3.mutable.Mutable;
 
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.om.base.AString;
 import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
 import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
@@ -58,6 +74,10 @@
             }
             i++;
         }
-        return new ARecordType(null, fieldNames, fieldTypes, isOpen);
+        try {
+            return new ARecordType(null, fieldNames, fieldTypes, isOpen);
+        } catch (AsterixException e) {
+            throw new AlgebricksException(e);
+        }
     }
 }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java
index 9ad960f..20e3f56 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java
@@ -1,10 +1,26 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.asterix.om.types;
 
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.ObjectInputStream;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 
 import org.json.JSONArray;
 import org.json.JSONException;
@@ -14,6 +30,13 @@
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.om.base.IAObject;
 import edu.uci.ics.asterix.om.visitors.IOMVisitor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.data.std.util.ByteArrayAccessibleOutputStream;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
 
 public class ARecordType extends AbstractComplexType {
 
@@ -22,16 +45,110 @@
     private IAType[] fieldTypes;
     private boolean isOpen;
     private final List<IRecordTypeAnnotation> annotations = new ArrayList<IRecordTypeAnnotation>();
-    private final Map<String, Integer> typeMap = new HashMap<String, Integer>();
 
-    public ARecordType(String typeName, String[] fieldNames, IAType[] fieldTypes, boolean isOpen) {
+    private transient IBinaryHashFunction fieldNameHashFunction;
+    private transient IBinaryComparator fieldNameComparator;
+    private final byte serializedFieldNames[];
+    private final int serializedFieldNameOffsets[];
+    private final long hashCodeIndexPairs[];
+
+    /**
+     * @param typeName
+     *            the name of the type
+     * @param fieldNames
+     *            the names of the closed fields
+     * @param fieldTypes
+     *            the types of the closed fields
+     * @param isOpen
+     *            whether the record is open
+     * @throws AsterixException
+     *             if there are duplicate field names or if there is an error serializing the field names
+     */
+    public ARecordType(String typeName, String[] fieldNames, IAType[] fieldTypes, boolean isOpen)
+            throws AsterixException {
         super(typeName);
         this.fieldNames = fieldNames;
         this.fieldTypes = fieldTypes;
         this.isOpen = isOpen;
+
+        fieldNameComparator = new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY)
+                .createBinaryComparator();
+        fieldNameHashFunction = new PointableBinaryHashFunctionFactory(UTF8StringPointable.FACTORY)
+                .createBinaryHashFunction();
+        ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
+        DataOutputStream dos = new DataOutputStream(baaos);
+        serializedFieldNameOffsets = new int[fieldNames.length];
+        hashCodeIndexPairs = new long[fieldNames.length];
+
+        int length = 0;
         for (int i = 0; i < fieldNames.length; i++) {
-            typeMap.put(fieldNames[i], i);
+            serializedFieldNameOffsets[i] = baaos.size();
+            try {
+                dos.writeUTF(fieldNames[i]);
+            } catch (IOException e) {
+                throw new AsterixException(e);
+            }
+            length = baaos.size() - serializedFieldNameOffsets[i];
+            hashCodeIndexPairs[i] = fieldNameHashFunction.hash(baaos.getByteArray(), serializedFieldNameOffsets[i],
+                    length);
+            hashCodeIndexPairs[i] = hashCodeIndexPairs[i] << 32;
+            hashCodeIndexPairs[i] = hashCodeIndexPairs[i] | i;
         }
+        serializedFieldNames = baaos.getByteArray();
+
+        Arrays.sort(hashCodeIndexPairs);
+        int j;
+        for (int i = 0; i < fieldNames.length; i++) {
+            j = findFieldPosition(serializedFieldNames, serializedFieldNameOffsets[i],
+                    UTF8StringPointable.getStringLength(serializedFieldNames, serializedFieldNameOffsets[i]));
+            if (j != i) {
+                throw new AsterixException("Closed fields " + j + " and " + i + " have the same field name \""
+                        + fieldNames[i] + "\"");
+            }
+        }
+    }
+
+    private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
+        ois.defaultReadObject();
+        fieldNameComparator = new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY)
+                .createBinaryComparator();
+        fieldNameHashFunction = new PointableBinaryHashFunctionFactory(UTF8StringPointable.FACTORY)
+                .createBinaryHashFunction();
+    }
+
+    /**
+     * Returns the position of the field in the closed schema or -1 if the field does not exist.
+     * 
+     * @param bytes
+     *            the serialized bytes of the field name
+     * @param start
+     *            the starting offset of the field name in bytes
+     * @param length
+     *            the length of the field name in bytes
+     * @return the position of the field in the closed schema or -1 if the field does not exist.
+     */
+    public int findFieldPosition(byte[] bytes, int start, int length) {
+        if (hashCodeIndexPairs.length == 0) {
+            return -1;
+        }
+
+        int fIndex;
+        int probeFieldHash = fieldNameHashFunction.hash(bytes, start, length);
+        int i = Arrays.binarySearch(hashCodeIndexPairs, ((long) probeFieldHash) << 32);
+        i = (i < 0) ? -(i + 1) : i;
+
+        while (i < hashCodeIndexPairs.length && (int) (hashCodeIndexPairs[i] >>> 32) == probeFieldHash) {
+            fIndex = (int) hashCodeIndexPairs[i];
+            int cFieldLength = UTF8StringPointable.getStringLength(serializedFieldNames,
+                    serializedFieldNameOffsets[fIndex]);
+            if (fieldNameComparator.compare(serializedFieldNames, serializedFieldNameOffsets[fIndex], cFieldLength,
+                    bytes, start, length) == 0) {
+                return fIndex;
+            }
+            i++;
+        }
+
+        return -1;
     }
 
     public final String[] getFieldNames() {
@@ -77,17 +194,22 @@
         return isOpen;
     }
 
-    public int findFieldPosition(String fldName) {
-        for (int i = 0; i < fieldNames.length; i++) {
-            if (fieldNames[i].equals(fldName)) {
-                return i;
-            }
-        }
-        return -1;
+    /**
+     * Returns the position of the field in the closed schema or -1 if the field does not exist.
+     * 
+     * @param fieldName
+     *            the name of the field whose position is sought
+     * @return the position of the field in the closed schema or -1 if the field does not exist.
+     */
+    public int findFieldPosition(String fieldName) throws IOException {
+        ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
+        DataOutputStream dos = new DataOutputStream(baaos);
+        UTF8StringSerializerDeserializer.INSTANCE.serialize(fieldName, dos);
+        return findFieldPosition(baaos.getByteArray(), 0, baaos.getByteArray().length);
     }
 
-    public IAType getFieldType(String fieldName) {
-        return fieldTypes[typeMap.get(fieldName)];
+    public IAType getFieldType(String fieldName) throws IOException {
+        return fieldTypes[findFieldPosition(fieldName)];
     }
 
     @Override
@@ -119,14 +241,13 @@
     public int hash() {
         int h = 0;
         for (int i = 0; i < fieldNames.length; i++) {
-            h += 31 * h + fieldNames[i].hashCode();
+            h += 31 * h + (int) (hashCodeIndexPairs[i] >> 32);
         }
         for (int i = 0; i < fieldTypes.length; i++) {
             h += 31 * h + fieldTypes[i].hashCode();
         }
         return h;
     }
-
     @Override
     public JSONObject toJSON() throws JSONException {
         JSONObject type = new JSONObject();
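
The rewritten ARecordType drops the HashMap of field names in favor of the serialized UTF-8 names plus an array of longs whose upper 32 bits hold the field-name hash and whose lower 32 bits hold the field index; the array is sorted, so a lookup is a binary search on the hash followed by a byte-level comparison of the candidates that share it. Below is a self-contained sketch of that packing-and-lookup idea; it substitutes String.hashCode() and String.equals() for the UTF8StringPointable hash function and binary comparator, which is an assumption for readability, not what the class actually uses.

// Sketch of the hash<<32|index lookup scheme in the new ARecordType.
import java.util.Arrays;

public class FieldIndexSketch {
    private final String[] fieldNames;
    private final long[] hashCodeIndexPairs;

    public FieldIndexSketch(String[] fieldNames) {
        this.fieldNames = fieldNames;
        hashCodeIndexPairs = new long[fieldNames.length];
        for (int i = 0; i < fieldNames.length; i++) {
            long hash = fieldNames[i].hashCode() & 0xFFFFFFFFL; // keep 32 bits
            hashCodeIndexPairs[i] = (hash << 32) | i;           // hash in the high half, index in the low half
        }
        Arrays.sort(hashCodeIndexPairs); // ordered by hash, then by index
    }

    public int findFieldPosition(String fieldName) {
        long probe = (fieldName.hashCode() & 0xFFFFFFFFL) << 32;
        int i = Arrays.binarySearch(hashCodeIndexPairs, probe);
        i = (i < 0) ? -(i + 1) : i;
        // walk every entry that shares the probed hash and compare the names
        while (i < hashCodeIndexPairs.length
                && (hashCodeIndexPairs[i] >>> 32) == (probe >>> 32)) {
            int fIndex = (int) hashCodeIndexPairs[i];
            if (fieldNames[fIndex].equals(fieldName)) {
                return fIndex;
            }
            i++;
        }
        return -1; // not a closed field
    }
}

Duplicate names are caught the same way in the constructor: each field name is probed back through findFieldPosition, and if the returned index differs from the field's own index, two closed fields hashed and compared equal and an AsterixException is thrown.
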
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ATypeTag.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ATypeTag.java
index b75c074..e69fbcd 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ATypeTag.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ATypeTag.java
@@ -42,6 +42,7 @@
     LINE(30),
     POLYGON(31),
     CIRCLE(32),
+    INTERVAL(34),
     RECTANGLE(33),
     SYSTEM_NULL(34);
 
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/BuiltinType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/BuiltinType.java
index 4099c40..1e498bc 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/BuiltinType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/BuiltinType.java
@@ -431,6 +431,27 @@
         }
     };
 
+    public final static BuiltinType AINTERVAL = new LowerCaseConstructorType() {
+
+        private static final long serialVersionUID = 1L;
+
+        @Override
+        public String getDisplayName() {
+            return "AInterval";
+        }
+
+        @Override
+        public ATypeTag getTypeTag() {
+            return ATypeTag.INTERVAL;
+        }
+
+        @Override
+        public String getTypeName() {
+            return "interval";
+        }
+
+    };
+
     public final static BuiltinType APOINT = new LowerCaseConstructorType() {
 
         private static final long serialVersionUID = 1L;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/NonTaggedFormatUtil.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/NonTaggedFormatUtil.java
index 8832164..a8c0485 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/NonTaggedFormatUtil.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/NonTaggedFormatUtil.java
@@ -95,6 +95,8 @@
                 return 12;
             case POINT:
                 return 16;
+            case INTERVAL:
+                return 17;
             case POINT3D:
             case CIRCLE:
                 return 24;
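
The 17-byte untagged length added for INTERVAL is consistent with a start chronon (8 bytes), an end chronon (8 bytes), and one byte naming the interval's point type. That layout is an assumption inferred from the size, not spelled out in this patch; the sketch below only illustrates reading such a 17-byte buffer.

// Assumed layout only: start long + end long + point-type byte = 17 bytes.
import java.nio.ByteBuffer;

public class IntervalLayoutSketch {
    public static void dump(byte[] untaggedIntervalBytes) {
        ByteBuffer buf = ByteBuffer.wrap(untaggedIntervalBytes);
        long start = buf.getLong();    // bytes 0..7
        long end = buf.getLong();      // bytes 8..15
        byte pointTypeTag = buf.get(); // byte 16
        System.out.println(start + " -- " + end + " (point type tag " + pointTypeTag + ")");
    }
}
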
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/IOMVisitor.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/IOMVisitor.java
index 2c82174..703b792 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/IOMVisitor.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/IOMVisitor.java
@@ -14,6 +14,7 @@
 import edu.uci.ics.asterix.om.base.AInt32;
 import edu.uci.ics.asterix.om.base.AInt64;
 import edu.uci.ics.asterix.om.base.AInt8;
+import edu.uci.ics.asterix.om.base.AInterval;
 import edu.uci.ics.asterix.om.base.ALine;
 import edu.uci.ics.asterix.om.base.ANull;
 import edu.uci.ics.asterix.om.base.AOrderedList;
@@ -46,6 +47,8 @@
 
     public void visitADuration(ADuration obj) throws AsterixException;
 
+    public void visitAInterval(AInterval obj) throws AsterixException;
+
     public void visitADate(ADate obj) throws AsterixException;
 
     public void visitATime(ATime obj) throws AsterixException;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/OMPrintToStringVisitor.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/OMPrintToStringVisitor.java
index 1697c5c..e7856f0 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/OMPrintToStringVisitor.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/OMPrintToStringVisitor.java
@@ -15,6 +15,7 @@
 import edu.uci.ics.asterix.om.base.AInt32;
 import edu.uci.ics.asterix.om.base.AInt64;
 import edu.uci.ics.asterix.om.base.AInt8;
+import edu.uci.ics.asterix.om.base.AInterval;
 import edu.uci.ics.asterix.om.base.ALine;
 import edu.uci.ics.asterix.om.base.ANull;
 import edu.uci.ics.asterix.om.base.AOrderedList;
@@ -90,6 +91,12 @@
     }
 
     @Override
+    public void visitAInterval(AInterval obj) throws AsterixException {
+        // TODO Auto-generated method stub
+        throw new NotImplementedException();
+    }
+
+    @Override
     public void visitAFloat(AFloat obj) throws AsterixException {
         buffer.append(obj.getFloatValue() + "f");
     }
diff --git a/asterix-runtime/pom.xml b/asterix-runtime/pom.xml
index ddc1cfd..be64661 100644
--- a/asterix-runtime/pom.xml
+++ b/asterix-runtime/pom.xml
@@ -1,4 +1,5 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 	<modelVersion>4.0.0</modelVersion>
 	<parent>
 		<artifactId>asterix</artifactId>
@@ -6,6 +7,8 @@
 		<version>0.0.4-SNAPSHOT</version>
 	</parent>
 	<artifactId>asterix-runtime</artifactId>
+
+
 	<build>
 		<plugins>
 			<plugin>
@@ -13,27 +16,84 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
 				</configuration>
 			</plugin>
-			<plugin>
-				<groupId>org.codehaus.mojo</groupId>
-				<artifactId>javacc-maven-plugin</artifactId>
-				<version>2.6</version>
-				<executions>
-					<execution>
-						<id>javacc</id>
-						<goals>
-							<goal>javacc</goal>
-						</goals>
-						<configuration>
-							<isStatic>false</isStatic>
-						</configuration>
-					</execution>
-				</executions>
-			</plugin>
-		</plugins>
+	      	<plugin>
+	        	<groupId>edu.uci.ics.asterix</groupId>
+	        	<artifactId>lexer-generator-maven-plugin</artifactId>
+	        	<version>0.1</version>
+	        	<configuration>
+	        	  	<grammarFile>src/main/resources/adm.grammar</grammarFile>
+	        	  	<outputDir>${project.build.directory}/generated-sources/edu/uci/ics/asterix/runtime/operators/file/adm</outputDir>
+	        	</configuration>
+	        	<executions>
+		          	<execution>
+		          		<id>generate-lexer</id>
+            			<phase>generate-sources</phase>
+			            <goals>
+	              			<goal>generate-lexer</goal>
+	            		</goals>
+	          		</execution>
+	        	</executions>
+	      	</plugin>
+	      	 <plugin>
+			    <groupId>org.codehaus.mojo</groupId>
+			    <artifactId>build-helper-maven-plugin</artifactId>
+			    <executions>
+			        <execution>
+			            <id>add-source</id>
+			            <phase>generate-sources</phase>
+			            <goals>
+			                <goal>add-source</goal>
+			            </goals>
+			            <configuration>
+			                <sources>
+			                    <source>${project.build.directory}/generated-sources/</source>
+			                </sources>
+			            </configuration>
+			        </execution>
+			    </executions>
+ 			</plugin>
+	    </plugins>
+		<pluginManagement>
+			<plugins>
+				<!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
+				<plugin>
+					<groupId>org.eclipse.m2e</groupId>
+					<artifactId>lifecycle-mapping</artifactId>
+					<version>1.0.0</version>
+					<configuration>
+						<lifecycleMappingMetadata>
+							<pluginExecutions>
+								<pluginExecution>
+									<pluginExecutionFilter>
+										<groupId>
+											edu.uci.ics.asterix
+										</groupId>
+										<artifactId>
+											lexer-generator-maven-plugin
+										</artifactId>
+										<versionRange>
+											[0.1,)
+										</versionRange>
+										<goals>
+											<goal>generate-lexer</goal>
+										</goals>
+									</pluginExecutionFilter>
+									<action>
+										<execute>
+											<runOnIncremental>false</runOnIncremental>
+										</execute>
+									</action>
+								</pluginExecution>
+							</pluginExecutions>
+						</lifecycleMappingMetadata>
+					</configuration>
+				</plugin>
+			</plugins>
+		</pluginManagement>
 	</build>
 
 	<dependencies>
@@ -51,25 +111,25 @@
 		</dependency>
 		<dependency>
 			<groupId>edu.uci.ics.hyracks</groupId>
-		        <artifactId>hyracks-storage-am-btree</artifactId>
+			<artifactId>hyracks-storage-am-btree</artifactId>
 		</dependency>
 		<dependency>
-		        <groupId>edu.uci.ics.asterix</groupId>
-		        <artifactId>asterix-transactions</artifactId>
+			<groupId>edu.uci.ics.asterix</groupId>
+			<artifactId>asterix-transactions</artifactId>
 			<version>0.0.4-SNAPSHOT</version>
 			<scope>compile</scope>
 		</dependency>
 		<dependency>
-		        <groupId>org.twitter4j</groupId>
-		        <artifactId>twitter4j-core</artifactId>
-		        <version>2.2.3</version>
+			<groupId>org.twitter4j</groupId>
+			<artifactId>twitter4j-core</artifactId>
+			<version>2.2.3</version>
 		</dependency>
 		<dependency>
-		        <groupId>org.apache.hadoop</groupId>
-		        <artifactId>hadoop-core</artifactId>
-		        <version>0.20.2</version>
-		        <type>jar</type>
-		        <scope>compile</scope>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-core</artifactId>
+			<version>0.20.2</version>
+			<type>jar</type>
+			<scope>compile</scope>
 		</dependency>
 	</dependencies>
 
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableGlobalAvgAggregateDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableGlobalAvgAggregateDescriptor.java
index 89081ab..f720434 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableGlobalAvgAggregateDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableGlobalAvgAggregateDescriptor.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.asterix.runtime.aggregates.serializable.std;
 
 import java.io.DataOutput;
@@ -6,6 +21,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
@@ -59,8 +75,15 @@
         List<IAType> unionList = new ArrayList<IAType>();
         unionList.add(BuiltinType.ANULL);
         unionList.add(BuiltinType.ADOUBLE);
-        final ARecordType recType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
-                new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT64 }, true);
+        ARecordType _recType;
+        try {
+            _recType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
+                    new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT64 }, true);
+        } catch (AsterixException e) {
+            throw new AlgebricksException(e);
+        }
+
+        final ARecordType recType = _recType;
 
         return new ICopySerializableAggregateFunctionFactory() {
             private static final long serialVersionUID = 1L;
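
Behind the serializer plumbing, the local/global avg descriptors implement a two-phase average: the local step folds raw values into a {sum, count} record (sum is the optional-double union so an empty partition can carry null), and the global step adds the partials and emits sum/count, or null when the combined count is zero. The sketch below captures only that logic; the PartialAvg class is hypothetical and stands in for the serialized record the patch builds.

// Sketch of the two-phase avg logic, stripped of the record/serde machinery.
import java.util.List;

public class AvgSketch {
    public static final class PartialAvg {
        final double sum;  // an empty partition is represented by count == 0 here
        final long count;

        public PartialAvg(double sum, long count) {
            this.sum = sum;
            this.count = count;
        }
    }

    // local step: one partial per partition
    public static PartialAvg local(List<Double> values) {
        double sum = 0;
        long count = 0;
        for (double v : values) {
            sum += v;
            count++;
        }
        return new PartialAvg(sum, count);
    }

    // global step: add the partials and emit sum/count, or null if nothing was seen
    public static Double global(List<PartialAvg> partials) {
        double sum = 0;
        long count = 0;
        for (PartialAvg p : partials) {
            sum += p.sum;
            count += p.count;
        }
        return count == 0 ? null : sum / count;
    }
}
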
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableLocalAvgAggregateDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableLocalAvgAggregateDescriptor.java
index 41047f7..219204b 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableLocalAvgAggregateDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableLocalAvgAggregateDescriptor.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.asterix.runtime.aggregates.serializable.std;
 
 import java.io.DataOutput;
@@ -6,6 +21,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AFloatSerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
@@ -63,8 +79,15 @@
         List<IAType> unionList = new ArrayList<IAType>();
         unionList.add(BuiltinType.ANULL);
         unionList.add(BuiltinType.ADOUBLE);
-        final ARecordType recType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
-                new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT64 }, true);
+        ARecordType tmpRecType;
+        try {
+            tmpRecType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
+                    new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT64 }, true);
+        } catch (AsterixException e) {
+            throw new AlgebricksException(e);
+        }
+
+        final ARecordType recType = tmpRecType;
 
         return new ICopySerializableAggregateFunctionFactory() {
             private static final long serialVersionUID = 1L;
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/AvgAggregateDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/AvgAggregateDescriptor.java
index 4fe5e35..93c7026 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/AvgAggregateDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/AvgAggregateDescriptor.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.asterix.runtime.aggregates.std;
 
 import java.io.DataOutput;
@@ -7,6 +22,7 @@
 import java.util.List;
 
 import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AFloatSerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
@@ -64,8 +80,15 @@
         List<IAType> unionList = new ArrayList<IAType>();
         unionList.add(BuiltinType.ANULL);
         unionList.add(BuiltinType.ADOUBLE);
-        final ARecordType recType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
-                new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT32 }, true);
+        ARecordType tmpRecType;
+        try {
+            tmpRecType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
+                    new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT32 }, true);
+        } catch (AsterixException e) {
+            throw new AlgebricksException(e);
+        }
+
+        final ARecordType recType = tmpRecType;
 
         return new ICopyAggregateFunctionFactory() {
             private static final long serialVersionUID = 1L;
@@ -78,7 +101,7 @@
 
                     private DataOutput out = provider.getDataOutput();
                     private ArrayBackedValueStorage inputVal = new ArrayBackedValueStorage();
-                    private ICopyEvaluator eval = args[0].createEvaluator(inputVal);                    
+                    private ICopyEvaluator eval = args[0].createEvaluator(inputVal);
                     private double sum;
                     private int count;
                     private ATypeTag aggType;
@@ -115,13 +138,13 @@
                     @Override
                     public void step(IFrameTupleReference tuple) throws AlgebricksException {
                         inputVal.reset();
-                        eval.evaluate(tuple);                        
+                        eval.evaluate(tuple);
                         ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER
                                 .deserialize(inputVal.getByteArray()[0]);
                         if (typeTag == ATypeTag.NULL || aggType == ATypeTag.NULL) {
                             aggType = ATypeTag.NULL;
                             return;
-                        } else if (aggType == ATypeTag.SYSTEM_NULL) {                           
+                        } else if (aggType == ATypeTag.SYSTEM_NULL) {
                             aggType = typeTag;
                         } else if (typeTag != ATypeTag.SYSTEM_NULL && typeTag != aggType) {
                             throw new AlgebricksException("Unexpected type " + typeTag
@@ -129,7 +152,7 @@
                         }
                         if (typeTag != ATypeTag.SYSTEM_NULL) {
                             ++count;
-                        }                        
+                        }
                         switch (typeTag) {
                             case INT8: {
                                 byte val = AInt8SerializerDeserializer.getByte(inputVal.getByteArray(), 1);
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/GlobalAvgAggregateDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/GlobalAvgAggregateDescriptor.java
index 347f5e7..d262ef4 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/GlobalAvgAggregateDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/GlobalAvgAggregateDescriptor.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.asterix.runtime.aggregates.std;
 
 import java.io.DataOutput;
@@ -6,7 +21,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
@@ -61,8 +76,15 @@
         List<IAType> unionList = new ArrayList<IAType>();
         unionList.add(BuiltinType.ANULL);
         unionList.add(BuiltinType.ADOUBLE);
-        final ARecordType recType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
-                new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT32 }, false);
+        ARecordType tmpRecType;
+        try {
+            tmpRecType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
+                    new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT32 }, false);
+        } catch (AsterixException e) {
+            throw new AlgebricksException(e);
+        }
+
+        final ARecordType recType = tmpRecType;
 
         return new ICopyAggregateFunctionFactory() {
             private static final long serialVersionUID = 1L;
@@ -114,7 +136,7 @@
                         inputVal.reset();
                         eval.evaluate(tuple);
                         byte[] serBytes = inputVal.getByteArray();
-                        ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serBytes[0]);                        
+                        ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serBytes[0]);
                         switch (typeTag) {
                             case NULL: {
                                 metNull = true;
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/LocalAvgAggregateDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/LocalAvgAggregateDescriptor.java
index de02246..5400d78 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/LocalAvgAggregateDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/LocalAvgAggregateDescriptor.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.asterix.runtime.aggregates.std;
 
 import java.io.DataOutput;
@@ -6,7 +21,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AFloatSerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
@@ -72,8 +87,15 @@
                 List<IAType> unionList = new ArrayList<IAType>();
                 unionList.add(BuiltinType.ANULL);
                 unionList.add(BuiltinType.ADOUBLE);
-                final ARecordType recType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
-                        new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT32 }, false);
+                ARecordType tmpRecType;
+                try {
+                    tmpRecType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
+                            new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT32 }, false);
+                } catch (AsterixException e) {
+                    throw new AlgebricksException(e);
+                }
+
+                final ARecordType recType = tmpRecType;
 
                 return new ICopyAggregateFunction() {
 
@@ -204,5 +226,4 @@
             }
         };
     }
-
 }
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalDayAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalDayAccessor.java
new file mode 100644
index 0000000..05391de
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalDayAccessor.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.accessors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TemporalDayAccessor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "day", 1);
+
+    // allowed input types
+    private static final byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
+    private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+    private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+    private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new TemporalDayAccessor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+
+                    private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+
+                    private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+                    private GregorianCalendarSystem calSystem = GregorianCalendarSystem.getInstance();
+
+                    // for output: type integer
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<AInt32> intSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.AINT32);
+                    private AMutableInt32 aMutableInt32 = new AMutableInt32(0);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut.reset();
+                        eval.evaluate(tuple);
+                        byte[] bytes = argOut.getByteArray();
+
+                        try {
+
+                            if (bytes[0] == SER_DURATION_TYPE_TAG) {
+                                aMutableInt32.setValue(calSystem.getDurationDay(ADurationSerializerDeserializer
+                                        .getDayTime(bytes, 1)));
+                                intSerde.serialize(aMutableInt32, out);
+                                return;
+                            }
+
+                            long chrononTimeInMs = 0;
+                            if (bytes[0] == SER_DATE_TYPE_TAG) {
+                                chrononTimeInMs = AInt32SerializerDeserializer.getInt(bytes, 1)
+                                        * GregorianCalendarSystem.CHRONON_OF_DAY;
+                            } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+                                chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
+                            } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            } else {
+                                throw new AlgebricksException("Inapplicable input type: " + bytes[0]);
+                            }
+
+                            int year = calSystem.getYear(chrononTimeInMs);
+                            int month = calSystem.getMonthOfYear(chrononTimeInMs, year);
+                            int day = calSystem.getDayOfMonthYear(chrononTimeInMs, year, month);
+
+                            aMutableInt32.setValue(day);
+                            intSerde.serialize(aMutableInt32, out);
+
+                        } catch (IOException e) {
+                            throw new AlgebricksException(e);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
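
TemporalDayAccessor turns the stored chronon into a day of month: DATE values hold a day count that is scaled by GregorianCalendarSystem.CHRONON_OF_DAY, DATETIME values already hold epoch milliseconds, and DURATION values short-circuit to the duration's day component. A hedged sketch of the same calendar arithmetic follows, using java.util.GregorianCalendar in UTC as a stand-in for GregorianCalendarSystem; the class and method names are illustrative only.

// Sketch of what the "day" accessor computes for DATE/DATETIME inputs.
import java.util.GregorianCalendar;
import java.util.TimeZone;

public class DayAccessorSketch {
    private static final long CHRONON_OF_DAY = 86400000L; // ms per day, as in the patch's DATE branch

    // DATE values store a day count; DATETIME values store epoch milliseconds.
    public static int dayOfMonthFromDate(int storedDays) {
        return dayOfMonthFromChronon(storedDays * CHRONON_OF_DAY);
    }

    public static int dayOfMonthFromChronon(long chrononTimeInMs) {
        GregorianCalendar cal = new GregorianCalendar(TimeZone.getTimeZone("UTC"));
        cal.setTimeInMillis(chrononTimeInMs);
        return cal.get(GregorianCalendar.DAY_OF_MONTH);
    }
}
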
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalHourAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalHourAccessor.java
new file mode 100644
index 0000000..eba012f
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalHourAccessor.java
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.accessors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TemporalHourAccessor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "hour", 1);
+
+    // allowed input types
+    private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
+    private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+    private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+    private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new TemporalHourAccessor();
+        }
+
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+
+                    private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+
+                    private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+                    private GregorianCalendarSystem calSystem = GregorianCalendarSystem.getInstance();
+
+                    // for output: type integer
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<AInt32> intSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.AINT32);
+                    private AMutableInt32 aMutableInt32 = new AMutableInt32(0);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut.reset();
+                        eval.evaluate(tuple);
+                        byte[] bytes = argOut.getByteArray();
+
+                        try {
+
+                            if (bytes[0] == SER_DURATION_TYPE_TAG) {
+                                aMutableInt32.setValue(calSystem.getDurationHour(ADurationSerializerDeserializer
+                                        .getDayTime(bytes, 1)));
+                                intSerde.serialize(aMutableInt32, out);
+                                return;
+                            }
+
+                            long chrononTimeInMs = 0;
+                            if (bytes[0] == SER_TIME_TYPE_TAG) {
+                                chrononTimeInMs = AInt32SerializerDeserializer.getInt(bytes, 1);
+                            } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+                                chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
+                            } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            } else {
+                                throw new AlgebricksException("Inapplicable input type: " + bytes[0]);
+                            }
+
+                            int hour = calSystem.getHourOfDay(chrononTimeInMs);
+
+                            aMutableInt32.setValue(hour);
+                            intSerde.serialize(aMutableInt32, out);
+
+                        } catch (IOException e) {
+                            throw new AlgebricksException(e);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
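
The new Temporal*Accessor descriptors added in this patch all follow the same evaluate() pattern: evaluate the argument into a byte buffer, dispatch on the type-tag byte at offset 0, pull one calendar field out of the payload, and serialize an AINT32 result (or null). A minimal standalone sketch of that pattern in plain Java is shown below; the tag constants, payload layouts, and helper names are illustrative stand-ins rather than the Asterix/Hyracks APIs.

    public class TagDispatchSketch {
        // Hypothetical tag values; the real code uses ATypeTag.serialize().
        static final byte TAG_TIME = 1, TAG_DATETIME = 2, TAG_NULL = 3;

        // value[0] is the type tag; the payload starts at offset 1 (big-endian).
        static Integer hourOf(byte[] value) {
            switch (value[0]) {
                case TAG_TIME: {
                    int msOfDay = readInt(value, 1);               // TIME payload: int ms within the day
                    return msOfDay / 3_600_000;
                }
                case TAG_DATETIME: {
                    long chronon = readLong(value, 1);             // DATETIME payload: long ms since epoch
                    return (int) ((chronon / 3_600_000L) % 24);    // simplified: assumes chronon >= 0
                }
                case TAG_NULL:
                    return null;                                   // null propagates
                default:
                    throw new IllegalArgumentException("Inapplicable input type: " + value[0]);
            }
        }

        static int readInt(byte[] b, int off) {
            return ((b[off] & 0xff) << 24) | ((b[off + 1] & 0xff) << 16)
                    | ((b[off + 2] & 0xff) << 8) | (b[off + 3] & 0xff);
        }

        static long readLong(byte[] b, int off) {
            return ((long) readInt(b, off) << 32) | (readInt(b, off + 4) & 0xffffffffL);
        }

        public static void main(String[] args) {
            byte[] time = { TAG_TIME, 0x02, (byte) 0xC4, (byte) 0x9E, 0x40 };   // 46,440,000 ms = 12:54:00
            System.out.println(hourOf(time));                                   // 12
        }
    }
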
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMillisecondAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMillisecondAccessor.java
new file mode 100644
index 0000000..ecc1a35
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMillisecondAccessor.java
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.accessors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TemporalMillisecondAccessor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "millisecond", 1);
+
+    // allowed input types
+    private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
+    private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+    private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+    private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new TemporalMillisecondAccessor();
+        }
+
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+
+                    private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+
+                    private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+                    private GregorianCalendarSystem calSystem = GregorianCalendarSystem.getInstance();
+
+                    // for output: type integer
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<AInt32> intSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.AINT32);
+                    private AMutableInt32 aMutableInt32 = new AMutableInt32(0);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut.reset();
+                        eval.evaluate(tuple);
+                        byte[] bytes = argOut.getByteArray();
+
+                        try {
+
+                            if (bytes[0] == SER_DURATION_TYPE_TAG) {
+                                aMutableInt32.setValue(calSystem.getDurationMillisecond(ADurationSerializerDeserializer
+                                        .getDayTime(bytes, 1)));
+                                intSerde.serialize(aMutableInt32, out);
+                                return;
+                            }
+
+                            long chrononTimeInMs = 0;
+                            if (bytes[0] == SER_TIME_TYPE_TAG) {
+                                chrononTimeInMs = AInt32SerializerDeserializer.getInt(bytes, 1);
+                            } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+                                chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
+                            } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            } else {
+                                throw new AlgebricksException("Inapplicable input type: " + bytes[0]);
+                            }
+
+                            int ms = calSystem.getMillisOfSec(chrononTimeInMs);
+
+                            aMutableInt32.setValue(ms);
+                            intSerde.serialize(aMutableInt32, out);
+
+                        } catch (IOException e) {
+                            throw new AlgebricksException(e);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMinuteAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMinuteAccessor.java
new file mode 100644
index 0000000..f436016
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMinuteAccessor.java
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.accessors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TemporalMinuteAccessor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "minute", 1);
+
+    // allowed input types
+    private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
+    private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+    private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+    private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new TemporalMinuteAccessor();
+        }
+
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+
+                    private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+
+                    private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+                    private GregorianCalendarSystem calSystem = GregorianCalendarSystem.getInstance();
+
+                    // for output: type integer
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<AInt32> intSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.AINT32);
+                    private AMutableInt32 aMutableInt32 = new AMutableInt32(0);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut.reset();
+                        eval.evaluate(tuple);
+                        byte[] bytes = argOut.getByteArray();
+
+                        try {
+
+                            if (bytes[0] == SER_DURATION_TYPE_TAG) {
+                                aMutableInt32.setValue(calSystem.getDurationMinute(ADurationSerializerDeserializer
+                                        .getDayTime(bytes, 1)));
+                                intSerde.serialize(aMutableInt32, out);
+                                return;
+                            }
+
+                            long chrononTimeInMs = 0;
+                            if (bytes[0] == SER_TIME_TYPE_TAG) {
+                                chrononTimeInMs = AInt32SerializerDeserializer.getInt(bytes, 1);
+                            } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+                                chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
+                            } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            } else {
+                                throw new AlgebricksException("Inapplicable input type: " + bytes[0]);
+                            }
+
+                            int min = calSystem.getMinOfHour(chrononTimeInMs);
+
+                            aMutableInt32.setValue(min);
+                            intSerde.serialize(aMutableInt32, out);
+
+                        } catch (IOException e) {
+                            throw new AlgebricksException(e);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMonthAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMonthAccessor.java
new file mode 100644
index 0000000..fb68f7d
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMonthAccessor.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.accessors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TemporalMonthAccessor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "month", 1);
+
+    // allowed input types
+    private static final byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
+    private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+    private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+    private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new TemporalMonthAccessor();
+        }
+
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+
+                    private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+
+                    private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+                    private GregorianCalendarSystem calSystem = GregorianCalendarSystem.getInstance();
+
+                    // for output: type integer
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<AInt32> intSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.AINT32);
+                    private AMutableInt32 aMutableInt32 = new AMutableInt32(0);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut.reset();
+                        eval.evaluate(tuple);
+                        byte[] bytes = argOut.getByteArray();
+
+                        try {
+
+                            if (bytes[0] == SER_DURATION_TYPE_TAG) {
+                                aMutableInt32.setValue(calSystem.getDurationMonth(ADurationSerializerDeserializer
+                                        .getYearMonth(bytes, 1)));
+                                intSerde.serialize(aMutableInt32, out);
+                                return;
+                            }
+
+                            long chrononTimeInMs = 0;
+                            if (bytes[0] == SER_DATE_TYPE_TAG) {
+                                chrononTimeInMs = (long) AInt32SerializerDeserializer.getInt(bytes, 1)
+                                        * GregorianCalendarSystem.CHRONON_OF_DAY;
+                            } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+                                chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
+                            } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            } else {
+                                throw new AlgebricksException("Inapplicable input type: " + bytes[0]);
+                            }
+
+                            int year = calSystem.getYear(chrononTimeInMs);
+                            int month = calSystem.getMonthOfYear(chrononTimeInMs, year);
+
+                            aMutableInt32.setValue(month);
+                            intSerde.serialize(aMutableInt32, out);
+
+                        } catch (IOException e) {
+                            throw new AlgebricksException(e);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
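
The DATE branch above first widens the stored day count into a chronon (milliseconds since the epoch) before the month is computed. A standalone sketch of that conversion follows; the constant value 86,400,000 is an assumption standing in for GregorianCalendarSystem.CHRONON_OF_DAY, and the explicit widening to long is what keeps the product from overflowing if that constant is an int (if it is already a long, the cast is a harmless no-op).

    public class DateToChrononSketch {
        // Assumed stand-in for GregorianCalendarSystem.CHRONON_OF_DAY: ms in one day.
        static final long CHRONON_OF_DAY = 24L * 60 * 60 * 1000;     // 86,400,000

        public static void main(String[] args) {
            int daysSinceEpoch = 15553;                               // 2012-08-01 as days since 1970-01-01
            long chronon = (long) daysSinceEpoch * CHRONON_OF_DAY;    // 1,343,779,200,000 ms
            System.out.println(chronon);
        }
    }
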
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalSecondAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalSecondAccessor.java
new file mode 100644
index 0000000..3b9ee95
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalSecondAccessor.java
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.accessors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TemporalSecondAccessor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "second", 1);
+
+    // allowed input types
+    private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
+    private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+    private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+    private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new TemporalSecondAccessor();
+        }
+
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+
+                    private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+
+                    private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+                    private GregorianCalendarSystem calSystem = GregorianCalendarSystem.getInstance();
+
+                    // for output: type integer
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<AInt32> intSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.AINT32);
+                    private AMutableInt32 aMutableInt32 = new AMutableInt32(0);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut.reset();
+                        eval.evaluate(tuple);
+                        byte[] bytes = argOut.getByteArray();
+
+                        try {
+
+                            if (bytes[0] == SER_DURATION_TYPE_TAG) {
+                                aMutableInt32.setValue(calSystem.getDurationSecond(ADurationSerializerDeserializer
+                                        .getDayTime(bytes, 1)));
+                                intSerde.serialize(aMutableInt32, out);
+                                return;
+                            }
+
+                            long chrononTimeInMs = 0;
+                            if (bytes[0] == SER_TIME_TYPE_TAG) {
+                                chrononTimeInMs = AInt32SerializerDeserializer.getInt(bytes, 1);
+                            } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+                                chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
+                            } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            } else {
+                                throw new AlgebricksException("Inapplicable input type: " + bytes[0]);
+                            }
+
+                            int sec = calSystem.getSecOfMin(chrononTimeInMs);
+
+                            aMutableInt32.setValue(sec);
+                            intSerde.serialize(aMutableInt32, out);
+
+                        } catch (IOException e) {
+                            throw new AlgebricksException(e);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalYearAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalYearAccessor.java
new file mode 100644
index 0000000..41dacfd
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalYearAccessor.java
@@ -0,0 +1,156 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.accessors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TemporalYearAccessor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "year", 1);
+
+    // allowed input types
+    private static final byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
+    private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+    private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+    private static final byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
+    private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new TemporalYearAccessor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+
+                    private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+
+                    private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+                    private GregorianCalendarSystem calSystem = GregorianCalendarSystem.getInstance();
+
+                    // for output: type integer
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<AInt32> intSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.AINT32);
+                    private AMutableInt32 aMutableInt32 = new AMutableInt32(0);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut.reset();
+                        eval.evaluate(tuple);
+                        byte[] bytes = argOut.getByteArray();
+
+                        try {
+
+                            if (bytes[0] == SER_DURATION_TYPE_TAG) {
+                                aMutableInt32.setValue(calSystem.getDurationYear(ADurationSerializerDeserializer
+                                        .getYearMonth(bytes, 1)));
+                                intSerde.serialize(aMutableInt32, out);
+                                return;
+                            }
+
+                            long chrononTimeInMs = 0;
+                            if (bytes[0] == SER_DATE_TYPE_TAG) {
+                                chrononTimeInMs = (long) AInt32SerializerDeserializer.getInt(bytes, 1)
+                                        * GregorianCalendarSystem.CHRONON_OF_DAY;
+                            } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+                                chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
+                            } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            } else if (bytes[0] == SER_STRING_TYPE_TAG) {
+                                int year;
+                                if (UTF8StringPointable.charAt(bytes, 3) == '-') {
+                                    // in case of a negative year
+                                    year = -1
+                                            * ((UTF8StringPointable.charAt(bytes, 4) - '0') * 1000
+                                                    + (UTF8StringPointable.charAt(bytes, 5) - '0') * 100
+                                                    + (UTF8StringPointable.charAt(bytes, 6) - '0') * 10 + (UTF8StringPointable
+                                                    .charAt(bytes, 7) - '0'));
+                                } else {
+                                    year = (UTF8StringPointable.charAt(bytes, 3) - '0') * 1000
+                                            + (UTF8StringPointable.charAt(bytes, 4) - '0') * 100
+                                            + (UTF8StringPointable.charAt(bytes, 5) - '0') * 10
+                                            + (UTF8StringPointable.charAt(bytes, 6) - '0');
+                                }
+                                aMutableInt32.setValue(year);
+                                intSerde.serialize(aMutableInt32, out);
+                                return;
+                            } else {
+                                throw new AlgebricksException("Inapplicable input type: " + bytes[0]);
+                            }
+
+                            int year = calSystem.getYear(chrononTimeInMs);
+
+                            aMutableInt32.setValue(year);
+                            intSerde.serialize(aMutableInt32, out);
+
+                        } catch (IOException e) {
+                            throw new AlgebricksException(e);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+}
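
Unlike the other accessors, the year accessor also accepts a STRING argument and reads the year digits directly out of the serialized UTF-8 bytes (the character data starts at offset 3, after the tag byte and the two length bytes, so offsets 3..7 correspond to the first characters of the value). A standalone sketch of the same digit arithmetic on a plain Java string:

    public class YearDigitsSketch {
        // Mirrors the STRING branch above: the first four characters are the year digits,
        // shifted one position to the right when the value starts with '-'.
        static int yearOf(String s) {
            int i = 0, sign = 1;
            if (s.charAt(0) == '-') {
                sign = -1;
                i = 1;
            }
            return sign * ((s.charAt(i) - '0') * 1000 + (s.charAt(i + 1) - '0') * 100
                    + (s.charAt(i + 2) - '0') * 10 + (s.charAt(i + 3) - '0'));
        }

        public static void main(String[] args) {
            System.out.println(yearOf("2012-08-06"));     // 2012
            System.out.println(yearOf("-1600-03-01"));    // -1600
        }
    }
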
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/ClosedRecordConstructorEvalFactory.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/ClosedRecordConstructorEvalFactory.java
index 38983e2..db3bc40 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/ClosedRecordConstructorEvalFactory.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/ClosedRecordConstructorEvalFactory.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.asterix.runtime.evaluators.common;
 
 import java.io.DataOutput;
@@ -5,6 +20,7 @@
 
 import edu.uci.ics.asterix.builders.IARecordBuilder;
 import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.om.types.ATypeTag;
 import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -72,8 +88,8 @@
                     }
                 }
                 recBuilder.write(out, true);
-            } catch (IOException ioe) {
-                throw new AlgebricksException(ioe);
+            } catch (IOException | AsterixException e) {
+                throw new AlgebricksException(e);
             }
         }
     }
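
The record-constructor evaluator above now funnels both failure types through a single Java 7 multi-catch before wrapping them in an AlgebricksException. A standalone sketch of the construct, using stand-in exception types rather than the Asterix ones:

    import java.io.IOException;

    public class MultiCatchSketch {
        static void work(boolean io) throws IOException {
            if (io) {
                throw new IOException("io failure");
            }
            throw new IllegalStateException("state failure");
        }

        public static void main(String[] args) {
            for (boolean io : new boolean[] { true, false }) {
                try {
                    work(io);
                } catch (IOException | IllegalStateException e) {   // one handler; e is implicitly final
                    System.out.println("wrapped: " + e.getMessage());
                }
            }
        }
    }
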
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/comparisons/AbstractComparisonEvaluator.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/comparisons/AbstractComparisonEvaluator.java
index 18fc2d7..2b4ea3a 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/comparisons/AbstractComparisonEvaluator.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/comparisons/AbstractComparisonEvaluator.java
@@ -2,6 +2,7 @@
 
 import java.io.DataOutput;
 
+import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.ADateOrTimeAscBinaryComparatorFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.ADateTimeAscBinaryComparatorFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AFloatSerializerDeserializer;
@@ -50,6 +51,8 @@
             .createBinaryComparator();
     protected IBinaryComparator dateTimeBinaryComp = ADateTimeAscBinaryComparatorFactory.INSTANCE
             .createBinaryComparator();
+    protected IBinaryComparator dateOrTimeBinaryComp = ADateOrTimeAscBinaryComparatorFactory.INSTANCE
+            .createBinaryComparator();
 
     public AbstractComparisonEvaluator(DataOutput out, ICopyEvaluatorFactory evalLeftFactory,
             ICopyEvaluatorFactory evalRightFactory) throws AlgebricksException {
@@ -119,6 +122,10 @@
             case DATETIME: {
                 return compareDateTimeWithArg(typeTag2);
             }
+            case DATE:
+            case TIME: {
+                return compareDateOrTimeWithArg(typeTag2);
+            }
             default: {
                 throw new AlgebricksException("Comparison is undefined between types " + typeTag1 + " and " + typeTag2
                         + " .");
@@ -126,6 +133,22 @@
         }
     }
 
+    private ComparisonResult compareDateOrTimeWithArg(ATypeTag typeTag2) throws AlgebricksException {
+        if (typeTag2 == ATypeTag.NULL) {
+            return ComparisonResult.GREATER_THAN;
+        } else if (typeTag2 == ATypeTag.DATETIME) {
+            int result = dateOrTimeBinaryComp.compare(outLeft.getByteArray(), 1, outLeft.getLength() - 1,
+                    outRight.getByteArray(), 1, outRight.getLength() - 1);
+            if (result == 0)
+                return ComparisonResult.EQUAL;
+            else if (result < 0)
+                return ComparisonResult.LESS_THAN;
+            else
+                return ComparisonResult.GREATER_THAN;
+        }
+        throw new AlgebricksException("Comparison is undefined between types ADate/ATime and " + typeTag2 + " .");
+    }
+
     private ComparisonResult compareDateTimeWithArg(ATypeTag typeTag2) throws AlgebricksException {
         if (typeTag2 == ATypeTag.NULL) {
             return ComparisonResult.GREATER_THAN;
@@ -153,8 +176,8 @@
 
     private ComparisonResult compareStringWithArg(ATypeTag typeTag2) throws AlgebricksException {
         if (typeTag2 == ATypeTag.STRING) {
-            int result = strBinaryComp.compare(outLeft.getByteArray(), 1, outLeft.getLength() - 1, outRight.getByteArray(), 1,
-                    outRight.getLength() - 1);
+            int result = strBinaryComp.compare(outLeft.getByteArray(), 1, outLeft.getLength() - 1,
+                    outRight.getByteArray(), 1, outRight.getLength() - 1);
             if (result == 0)
                 return ComparisonResult.EQUAL;
             else if (result < 0)
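
Note that every comparator call in this evaluator passes (offset 1, length - 1): byte 0 of each evaluated argument is its type tag, so only the payload bytes take part in the comparison. A standalone sketch of that convention for a 4-byte big-endian int payload (readInt is an illustrative helper, not a Hyracks API):

    public class TaggedCompareSketch {
        // Compare two tagged values whose payload is a 4-byte big-endian int at offset 1.
        static int compareTaggedInts(byte[] left, byte[] right) {
            int l = readInt(left, 1);    // skip the type-tag byte at offset 0
            int r = readInt(right, 1);
            return (l < r) ? -1 : ((l == r) ? 0 : 1);
        }

        static int readInt(byte[] b, int off) {
            return ((b[off] & 0xff) << 24) | ((b[off + 1] & 0xff) << 16)
                    | ((b[off + 2] & 0xff) << 8) | (b[off + 3] & 0xff);
        }

        public static void main(String[] args) {
            byte[] a = { 17, 0, 0, 0, 5 };    // illustrative tag 17, payload 5
            byte[] b = { 17, 0, 0, 0, 9 };    // illustrative tag 17, payload 9
            System.out.println(compareTaggedInts(a, b));    // -1
        }
    }
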
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADateConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADateConstructorDescriptor.java
index cfebf48..44fcf7c 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADateConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADateConstructorDescriptor.java
@@ -21,7 +21,7 @@
 import edu.uci.ics.asterix.om.base.ADate;
 import edu.uci.ics.asterix.om.base.AMutableDate;
 import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.base.temporal.ADateAndTimeParser;
+import edu.uci.ics.asterix.om.base.temporal.ADateParserFactory;
 import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
 import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
 import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
@@ -84,8 +84,10 @@
                             byte[] serString = outInput.getByteArray();
                             if (serString[0] == SER_STRING_TYPE_TAG) {
 
-                                charAccessor.reset(serString, 3, 0);
-                                long chrononTimeInMs = ADateAndTimeParser.parseDatePart(charAccessor, true);
+                                int stringLength = ((serString[1] & 0xff) << 8) + (serString[2] & 0xff);
+
+                                charAccessor.reset(serString, 3, stringLength);
+                                long chrononTimeInMs = ADateParserFactory.parseDatePart(charAccessor, true);
 
                                 short temp = 0;
                                 if (chrononTimeInMs < 0
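
The constructors touched by this patch now derive the string length from the two bytes that follow the type tag (a big-endian, unsigned 16-bit count preceding the UTF-8 character data) instead of passing 0. Each byte has to be masked to 0xff before it is shifted, otherwise the high byte's contribution is lost. A standalone sketch of the decoding:

    public class UtfLengthSketch {
        // Assumed layout: [0] type tag, [1..2] big-endian character-data length, [3..] UTF-8 bytes.
        static int utfLength(byte[] serString) {
            return ((serString[1] & 0xff) << 8) | (serString[2] & 0xff);
        }

        public static void main(String[] args) {
            byte[] serString = { 13 /* illustrative tag */, 0x01, 0x2C };
            System.out.println(utfLength(serString));    // 300
        }
    }
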
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADateTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADateTimeConstructorDescriptor.java
index 19849aa..6a5783b 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADateTimeConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADateTimeConstructorDescriptor.java
@@ -21,7 +21,8 @@
 import edu.uci.ics.asterix.om.base.ADateTime;
 import edu.uci.ics.asterix.om.base.AMutableDateTime;
 import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.base.temporal.ADateAndTimeParser;
+import edu.uci.ics.asterix.om.base.temporal.ADateParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
 import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
 import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
 import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
@@ -71,7 +72,6 @@
                     @SuppressWarnings("unchecked")
                     private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
                             .getSerializerDeserializer(BuiltinType.ANULL);
-
                     private ByteArrayCharSequenceAccessor charAccessor = new ByteArrayCharSequenceAccessor();
 
                     @Override
@@ -82,7 +82,10 @@
                             eval.evaluate(tuple);
                             byte[] serString = outInput.getByteArray();
                             if (serString[0] == SER_STRING_TYPE_TAG) {
-                                charAccessor.reset(serString, 3, 0);
+
+                                int stringLength = ((serString[1] & 0xff) << 8) + (serString[2] & 0xff);
+
+                                charAccessor.reset(serString, 3, stringLength);
 
                                 // +1 if it is negative (-)
                                 short timeOffset = (short) ((charAccessor.getCharAt(0) == '-') ? 1 : 0);
@@ -96,11 +99,11 @@
                                 timeOffset += (charAccessor.getCharAt(timeOffset + 13) == ':') ? (short) (11)
                                         : (short) (9);
 
-                                long chrononTimeInMs = ADateAndTimeParser.parseDatePart(charAccessor, false);
+                                long chrononTimeInMs = ADateParserFactory.parseDatePart(charAccessor, false);
 
-                                charAccessor.reset(serString, 3, timeOffset);
+                                charAccessor.reset(serString, 3 + timeOffset, stringLength - timeOffset);
 
-                                chrononTimeInMs += ADateAndTimeParser.parseTimePart(charAccessor);
+                                chrononTimeInMs += ATimeParserFactory.parseTimePart(charAccessor);
 
                                 aDateTime.setValue(chrononTimeInMs);
                                 datetimeSerde.serialize(aDateTime, out);
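
The datetime constructor above splits the input string into a date part and a time part by computing timeOffset from the character layout: a ':' at position 13 (relative to the possible leading '-') indicates the extended date form, so the time begins 11 characters in; otherwise the basic form is assumed and it begins 9 characters in. A standalone sketch of that offset arithmetic on a plain Java string:

    public class DateTimeSplitSketch {
        static int timeStart(String s) {
            int off = (s.charAt(0) == '-') ? 1 : 0;    // a leading '-' (negative year) shifts everything by one
            return off + ((s.charAt(off + 13) == ':') ? 11 : 9);
        }

        public static void main(String[] args) {
            System.out.println(timeStart("2012-08-06T23:59:59.999Z"));    // 11: extended date form
            System.out.println(timeStart("20120806T235959999Z"));         // 9: basic date form
        }
    }
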
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADurationConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADurationConstructorDescriptor.java
index f0660f5..b2b3f4e 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADurationConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADurationConstructorDescriptor.java
@@ -20,7 +20,7 @@
 import edu.uci.ics.asterix.om.base.ADuration;
 import edu.uci.ics.asterix.om.base.AMutableDuration;
 import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.base.temporal.ADurationParser;
+import edu.uci.ics.asterix.om.base.temporal.ADurationParserFactory;
 import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
 import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
 import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
@@ -83,9 +83,11 @@
 
                             if (serString[0] == SER_STRING_TYPE_TAG) {
 
-                                charAccessor.reset(serString, 3, 0);
+                                int stringLength = ((serString[1] & 0xff) << 8) + (serString[2] & 0xff);
 
-                                ADurationParser.parse(charAccessor, aDuration);
+                                charAccessor.reset(serString, 3, stringLength);
+
+                                ADurationParserFactory.parseDuration(charAccessor, aDuration);
 
                                 durationSerde.serialize(aDuration, out);
                             } else if (serString[0] == SER_NULL_TYPE_TAG) {
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AInt64ConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AInt64ConstructorDescriptor.java
index 15e3e72..6fa4925 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AInt64ConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AInt64ConstructorDescriptor.java
@@ -98,8 +98,9 @@
                                     else
                                         throw new AlgebricksException(errorMessage);
                                 }
-                                if (value < 0)
+                                if (value < 0 && value != -9223372036854775808L) {
                                     throw new AlgebricksException(errorMessage);
+                                }
                                 if (value > 0 && !positive)
                                     value *= -1;
 
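
The added guard admits -9223372036854775808 (Long.MIN_VALUE), the one long whose magnitude cannot be held as a positive long; negating it simply overflows back onto itself, so it must be exempted from the value < 0 rejection. A standalone sketch of the edge case:

    public class MinValueSketch {
        public static void main(String[] args) {
            long min = Long.MIN_VALUE;                           // -9223372036854775808
            System.out.println(-min == min);                     // true: negation overflows back to MIN_VALUE
            System.out.println(min == -9223372036854775808L);    // true: the literal is only legal when negated
            System.out.println(Long.MAX_VALUE);                  // 9223372036854775807, one smaller in magnitude
        }
    }
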
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateConstructorDescriptor.java
new file mode 100644
index 0000000..e1a12f8
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateConstructorDescriptor.java
@@ -0,0 +1,142 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.constructors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInterval;
+import edu.uci.ics.asterix.om.base.AMutableInterval;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.ADateParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class AIntervalFromDateConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-from-date", 2);
+    private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new AIntervalFromDateConstructorDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
+        return new ICopyEvaluatorFactory() {
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+                    private String errorMessage = "This cannot be an instance of interval (from Date)";
+                    //TODO: Where to move and fix these?
+                    private AMutableInterval aInterval = new AMutableInterval(0L, 0L, (byte) 0);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<AInterval> intervalSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.AINTERVAL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    private ByteArrayCharSequenceAccessor charAccessor = new ByteArrayCharSequenceAccessor();
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+
+                        argOut0.reset();
+                        argOut1.reset();
+                        eval0.evaluate(tuple);
+                        eval1.evaluate(tuple);
+
+                        try {
+
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                            } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG
+                                    && argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+
+                                int stringLength = ((argOut0.getByteArray()[1] & 0xff) << 8)
+                                        + (argOut0.getByteArray()[2] & 0xff);
+
+                                // start date
+                                charAccessor.reset(argOut0.getByteArray(), 3, stringLength);
+                                long intervalStart = ADateParserFactory.parseDatePart(charAccessor, true)
+                                        / GregorianCalendarSystem.CHRONON_OF_DAY;
+                                // end date
+
+                                stringLength = ((argOut1.getByteArray()[1] & 0xff) << 8)
+                                        + (argOut1.getByteArray()[2] & 0xff);
+
+                                charAccessor.reset(argOut1.getByteArray(), 3, stringLength);
+                                long intervalEnd = ADateParserFactory.parseDatePart(charAccessor, true)
+                                        / GregorianCalendarSystem.CHRONON_OF_DAY;
+
+                                if (intervalEnd < intervalStart) {
+                                    throw new AlgebricksException(
+                                            "Interval end must not be less than the interval start.");
+                                }
+
+                                aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATE.serialize());
+                                intervalSerde.serialize(aInterval, out);
+                            } else {
+                                throw new AlgebricksException("Wrong format for interval constructor from dates.");
+                            }
+
+                        } catch (IOException e1) {
+                            throw new AlgebricksException(errorMessage);
+                        } catch (Exception e2) {
+                            throw new AlgebricksException(e2);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
\ No newline at end of file
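
All of the interval constructors in this patch decode their string arguments the same way: byte 0 is the type tag, bytes 1-2 hold the UTF-8 byte length (big-endian), and the character data starts at offset 3. A minimal standalone sketch of that length decoding, with the class name and sample bytes illustrative only, not part of the patch:

    // Illustrative sketch: decode the 2-byte big-endian UTF-8 length that
    // follows the type tag of a serialized string value.
    public final class SerializedStringLength {

        /**
         * @param bytes serialized string: [0] type tag, [1..2] big-endian length, [3..] UTF-8 data
         * @return the number of UTF-8 bytes in the string payload
         */
        public static int utf8Length(byte[] bytes) {
            // Mask each byte to an unsigned value *before* shifting; writing
            // "bytes[1] & 0xff << 8" would shift the mask instead of the byte.
            return ((bytes[1] & 0xff) << 8) + (bytes[2] & 0xff);
        }

        public static void main(String[] args) {
            byte[] serialized = new byte[] { /* type tag */ 0, /* length hi */ 0, /* length lo */ 10,
                    '2', '0', '1', '0', '-', '1', '0', '-', '2', '4' };
            System.out.println(utf8Length(serialized)); // prints 10
        }
    }
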
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateTimeConstructorDescriptor.java
new file mode 100644
index 0000000..72a8e37
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateTimeConstructorDescriptor.java
@@ -0,0 +1,167 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.constructors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInterval;
+import edu.uci.ics.asterix.om.base.AMutableInterval;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.ADateParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class AIntervalFromDateTimeConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-from-datetime", 2);
+    private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new AIntervalFromDateTimeConstructorDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
+        return new ICopyEvaluatorFactory() {
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+                    private String errorMessage = "This cannot be an instance of interval (from DateTime)";
+                    //TODO: Where to move and fix these?
+                    private AMutableInterval aInterval = new AMutableInterval(0L, 0L, (byte) 0);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<AInterval> intervalSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.AINTERVAL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    private ByteArrayCharSequenceAccessor charAccessor = new ByteArrayCharSequenceAccessor();
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+
+                        argOut0.reset();
+                        argOut1.reset();
+                        eval0.evaluate(tuple);
+                        eval1.evaluate(tuple);
+
+                        try {
+
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                            } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG
+                                    && argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+                                // start datetime
+
+                                int stringLength = ((argOut0.getByteArray()[1] & 0xff) << 8)
+                                        + (argOut0.getByteArray()[2] & 0xff);
+
+                                charAccessor.reset(argOut0.getByteArray(), 3, stringLength);
+                                // get offset for time part: +1 if it is negative (-)
+                                short timeOffset = (short) ((charAccessor.getCharAt(0) == '-') ? 1 : 0);
+
+                                if (charAccessor.getCharAt(timeOffset + 10) != 'T'
+                                        && charAccessor.getCharAt(timeOffset + 8) != 'T') {
+                                    throw new AlgebricksException(errorMessage + ": missing T");
+                                }
+
+                                // if extended form 11, else 9
+                                timeOffset += (charAccessor.getCharAt(timeOffset + 13) == ':') ? (short) (11)
+                                        : (short) (9);
+                                long intervalStart = ADateParserFactory.parseDatePart(charAccessor, false);
+                                charAccessor.reset(argOut0.getByteArray(), 3 + timeOffset, stringLength - timeOffset);
+                                intervalStart += ATimeParserFactory.parseTimePart(charAccessor);
+
+                                // end datetime
+
+                                stringLength = ((argOut1.getByteArray()[1] & 0xff) << 8)
+                                        + (argOut1.getByteArray()[2] & 0xff);
+
+                                charAccessor.reset(argOut1.getByteArray(), 3, stringLength);
+                                // get offset for time part: +1 if it is negative (-)
+                                timeOffset = (short) ((charAccessor.getCharAt(0) == '-') ? 1 : 0);
+
+                                if (charAccessor.getCharAt(timeOffset + 10) != 'T'
+                                        && charAccessor.getCharAt(timeOffset + 8) != 'T') {
+                                    throw new AlgebricksException(errorMessage + ": missing T");
+                                }
+
+                                // if extended form 11, else 9
+                                timeOffset += (charAccessor.getCharAt(timeOffset + 13) == ':') ? (short) (11)
+                                        : (short) (9);
+                                long intervalEnd = ADateParserFactory.parseDatePart(charAccessor, false);
+                                charAccessor.reset(argOut1.getByteArray(), 3 + timeOffset, stringLength - timeOffset);
+                                intervalEnd += ATimeParserFactory.parseTimePart(charAccessor);
+
+                                if (intervalEnd < intervalStart) {
+                                    throw new AlgebricksException(
+                                            "Interval end must not be less than the interval start.");
+                                }
+
+                                aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATETIME.serialize());
+                                intervalSerde.serialize(aInterval, out);
+                            } else {
+                                throw new AlgebricksException("Wrong format for interval constructor from datetimes.");
+                            }
+
+                        } catch (IOException e1) {
+                            throw new AlgebricksException(errorMessage);
+                        } catch (Exception e2) {
+                            throw new AlgebricksException(e2);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
\ No newline at end of file
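
The datetime variant splits each argument into its date and time parts before parsing. A small standalone sketch of the offset arithmetic it uses to locate the time part of an ISO 8601 datetime (extended form "yyyy-mm-ddT...", basic form "yyyymmddT...", optional leading '-' for a negative year); the class and method names are illustrative only:

    // Illustrative sketch: find where the time part starts in an ISO 8601
    // datetime string, mirroring the timeOffset arithmetic in the patch.
    public final class DateTimeSplit {

        /** Returns the index of the first character after the 'T' separator. */
        public static int timePartOffset(String datetime) {
            // A leading '-' (negative year) shifts everything by one position.
            int offset = (datetime.charAt(0) == '-') ? 1 : 0;
            if (datetime.charAt(offset + 10) != 'T' && datetime.charAt(offset + 8) != 'T') {
                throw new IllegalArgumentException("missing T separator: " + datetime);
            }
            // Extended form "yyyy-mm-ddT..." -> date part is 10 chars; basic "yyyymmddT..." -> 8.
            return offset + ((datetime.charAt(offset + 13) == ':') ? 11 : 9);
        }

        public static void main(String[] args) {
            System.out.println(timePartOffset("2010-10-24T12:30:45")); // 11
            System.out.println(timePartOffset("20101024T123045"));     // 9
        }
    }
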
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromTimeConstructorDescriptor.java
new file mode 100644
index 0000000..3ce722a
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromTimeConstructorDescriptor.java
@@ -0,0 +1,147 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.constructors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInterval;
+import edu.uci.ics.asterix.om.base.AMutableInterval;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class AIntervalFromTimeConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-from-time", 2);
+    private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new AIntervalFromTimeConstructorDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
+        return new ICopyEvaluatorFactory() {
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+                    private String errorMessage = "This cannot be an instance of interval (from Time)";
+                    //TODO: Where to move and fix these?
+                    private AMutableInterval aInterval = new AMutableInterval(0L, 0L, (byte) 0);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<AInterval> intervalSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.AINTERVAL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    private ByteArrayCharSequenceAccessor charAccessor = new ByteArrayCharSequenceAccessor();
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+
+                        argOut0.reset();
+                        argOut1.reset();
+                        eval0.evaluate(tuple);
+                        eval1.evaluate(tuple);
+
+                        try {
+
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                            } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG
+                                    && argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+                                // start time
+
+                                int stringLength = ((argOut0.getByteArray()[1] & 0xff) << 8)
+                                        + (argOut0.getByteArray()[2] & 0xff);
+
+                                charAccessor.reset(argOut0.getByteArray(), 3, stringLength);
+                                long intervalStart = ATimeParserFactory.parseTimePart(charAccessor);
+                                if (intervalStart < 0) {
+                                    intervalStart += GregorianCalendarSystem.CHRONON_OF_DAY;
+                                }
+
+                                // end time
+
+                                stringLength = ((argOut1.getByteArray()[1] & 0xff) << 8)
+                                        + (argOut1.getByteArray()[2] & 0xff);
+
+                                charAccessor.reset(argOut1.getByteArray(), 3, stringLength);
+                                long intervalEnd = ATimeParserFactory.parseTimePart(charAccessor);
+                                if (intervalEnd < 0) {
+                                    intervalEnd += GregorianCalendarSystem.CHRONON_OF_DAY;
+                                }
+
+                                if (intervalEnd < intervalStart) {
+                                    throw new AlgebricksException(
+                                            "Interval end must not be less than the interval start.");
+                                }
+
+                                aInterval.setValue(intervalStart, intervalEnd, ATypeTag.TIME.serialize());
+                                intervalSerde.serialize(aInterval, out);
+                            } else {
+                                throw new AlgebricksException("Wrong format for interval constructor from times.");
+                            }
+
+                        } catch (IOException e1) {
+                            throw new AlgebricksException(errorMessage);
+                        } catch (Exception e2) {
+                            throw new AlgebricksException(e2);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
\ No newline at end of file
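
The time variant normalizes a parsed time-of-day chronon that comes back negative (for example when a timezone offset has been applied) by adding one day, which is what the CHRONON_OF_DAY additions above do. A minimal sketch, with MILLIS_PER_DAY standing in for GregorianCalendarSystem.CHRONON_OF_DAY and the class name illustrative:

    // Illustrative sketch: normalize a parsed time-of-day chronon into [0, 24h).
    public final class TimeChronon {

        private static final long MILLIS_PER_DAY = 24L * 60 * 60 * 1000;

        /** Wrap a negative time-of-day value into the current day. */
        public static long normalize(long chrononOfDay) {
            return (chrononOfDay < 0) ? chrononOfDay + MILLIS_PER_DAY : chrononOfDay;
        }

        public static void main(String[] args) {
            System.out.println(normalize(-3600000L)); // -01:00 wraps to 82800000 ms (23:00:00)
            System.out.println(normalize(45000000L)); // already in range, unchanged
        }
    }
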
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateConstructorDescriptor.java
new file mode 100644
index 0000000..23fdb07
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateConstructorDescriptor.java
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.constructors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInterval;
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.asterix.om.base.AMutableInterval;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.ADateParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ADurationParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
+import edu.uci.ics.asterix.om.base.temporal.DurationArithmeticOperations;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class AIntervalStartFromDateConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-start-from-date", 2);
+    private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new AIntervalStartFromDateConstructorDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
+        return new ICopyEvaluatorFactory() {
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+                    private String errorMessage = "This cannot be an instance of interval (from Date)";
+
+                    private AMutableInterval aInterval = new AMutableInterval(0L, 0L, (byte) 0);
+                    private AMutableDuration aDuration = new AMutableDuration(0, 0L);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<AInterval> intervalSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.AINTERVAL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    private ByteArrayCharSequenceAccessor charAccessor = new ByteArrayCharSequenceAccessor();
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+
+                        argOut0.reset();
+                        argOut1.reset();
+                        eval0.evaluate(tuple);
+                        eval1.evaluate(tuple);
+
+                        try {
+
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                            } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG
+                                    && argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+                                // start date
+
+                                int stringLength = ((argOut0.getByteArray()[1] & 0xff) << 8)
+                                        + (argOut0.getByteArray()[2] & 0xff);
+
+                                charAccessor.reset(argOut0.getByteArray(), 3, stringLength);
+                                long intervalStart = ADateParserFactory.parseDatePart(charAccessor, true);
+                                // duration
+
+                                stringLength = ((argOut1.getByteArray()[1] & 0xff) << 8)
+                                        + (argOut1.getByteArray()[2] & 0xff);
+
+                                charAccessor.reset(argOut1.getByteArray(), 3, stringLength);
+                                ADurationParserFactory.parseDuration(charAccessor, aDuration);
+
+                                long intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
+                                        aDuration.getMonths(), aDuration.getMilliseconds());
+
+                                intervalStart = GregorianCalendarSystem.getChrononInDays(intervalStart);
+                                intervalEnd = GregorianCalendarSystem.getChrononInDays(intervalEnd);
+
+                                if (intervalEnd < intervalStart) {
+                                    throw new AlgebricksException(
+                                            "Interval end must not be less than the interval start.");
+                                }
+
+                                aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATE.serialize());
+                                intervalSerde.serialize(aInterval, out);
+                            } else {
+                                throw new AlgebricksException("Wrong format for interval constructor from dates.");
+                            }
+
+                        } catch (IOException e1) {
+                            throw new AlgebricksException(errorMessage);
+                        } catch (Exception e2) {
+                            throw new AlgebricksException(e2);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
\ No newline at end of file
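
The start-from-date constructor derives the interval end by adding the duration to the start chronon and then reduces both endpoints to whole days. A simplified sketch of that arithmetic, assuming a plain millisecond duration and non-negative chronons (the committed code also handles year-month components via DurationArithmeticOperations.addDuration and converts with GregorianCalendarSystem.getChrononInDays); all names here are illustrative:

    // Illustrative sketch: start chronon + duration -> interval expressed in whole days.
    public final class DateIntervalFromDuration {

        private static final long MILLIS_PER_DAY = 24L * 60 * 60 * 1000;

        /** Returns {startInDays, endInDays}; assumes non-negative chronons and a millisecond-only duration. */
        public static long[] intervalInDays(long startChrononMillis, long durationMillis) {
            long endChrononMillis = startChrononMillis + durationMillis;
            long startDay = startChrononMillis / MILLIS_PER_DAY;
            long endDay = endChrononMillis / MILLIS_PER_DAY;
            if (endDay < startDay) {
                throw new IllegalArgumentException("Interval end must not be less than the interval start.");
            }
            return new long[] { startDay, endDay };
        }

        public static void main(String[] args) {
            // An arbitrary start day plus a 3-day duration.
            long[] days = intervalInDays(14907L * MILLIS_PER_DAY, 3L * MILLIS_PER_DAY);
            System.out.println(days[0] + " .. " + days[1]); // 14907 .. 14910
        }
    }
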
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateTimeConstructorDescriptor.java
new file mode 100644
index 0000000..91c45df
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateTimeConstructorDescriptor.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.constructors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInterval;
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.asterix.om.base.AMutableInterval;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.ADateParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ADurationParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
+import edu.uci.ics.asterix.om.base.temporal.DurationArithmeticOperations;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class AIntervalStartFromDateTimeConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-start-from-datetime", 2);
+    private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new AIntervalStartFromDateTimeConstructorDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
+        return new ICopyEvaluatorFactory() {
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+                    private String errorMessage = "This cannot be an instance of interval (from DateTime)";
+
+                    private AMutableInterval aInterval = new AMutableInterval(0L, 0L, (byte) 0);
+                    private AMutableDuration aDuration = new AMutableDuration(0, 0L);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<AInterval> intervalSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.AINTERVAL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    private ByteArrayCharSequenceAccessor charAccessor = new ByteArrayCharSequenceAccessor();
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+
+                        argOut0.reset();
+                        argOut1.reset();
+                        eval0.evaluate(tuple);
+                        eval1.evaluate(tuple);
+
+                        try {
+
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                            } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG
+                                    && argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+                                // start datetime
+
+                                int stringLength = ((argOut0.getByteArray()[1] & 0xff) << 8)
+                                        + (argOut0.getByteArray()[2] & 0xff);
+
+                                charAccessor.reset(argOut0.getByteArray(), 3, stringLength);
+                                // get offset for time part: +1 if it is negative (-)
+                                short timeOffset = (short) ((charAccessor.getCharAt(0) == '-') ? 1 : 0);
+
+                                if (charAccessor.getCharAt(timeOffset + 10) != 'T'
+                                        && charAccessor.getCharAt(timeOffset + 8) != 'T') {
+                                    throw new AlgebricksException(errorMessage + ": missing T");
+                                }
+
+                                // if extended form 11, else 9
+                                timeOffset += (charAccessor.getCharAt(timeOffset + 13) == ':') ? (short) (11)
+                                        : (short) (9);
+                                long intervalStart = ADateParserFactory.parseDatePart(charAccessor, false);
+                                charAccessor.reset(argOut0.getByteArray(), 3 + timeOffset, stringLength - timeOffset);
+                                intervalStart += ATimeParserFactory.parseTimePart(charAccessor);
+
+                                // duration
+
+                                stringLength = ((argOut1.getByteArray()[1] & 0xff) << 8)
+                                        + (argOut1.getByteArray()[2] & 0xff);
+
+                                charAccessor.reset(argOut1.getByteArray(), 3, stringLength);
+                                ADurationParserFactory.parseDuration(charAccessor, aDuration);
+
+                                long intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
+                                        aDuration.getMonths(), aDuration.getMilliseconds());
+
+                                if (intervalEnd < intervalStart) {
+                                    throw new AlgebricksException(
+                                            "Interval end must not be less than the interval start.");
+                                }
+
+                                aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATETIME.serialize());
+                                intervalSerde.serialize(aInterval, out);
+                            } else {
+                                throw new AlgebricksException("Wrong format for interval constructor from datetimes.");
+                            }
+
+                        } catch (IOException e1) {
+                            throw new AlgebricksException(errorMessage);
+                        } catch (Exception e2) {
+                            throw new AlgebricksException(e2);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
\ No newline at end of file
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromTimeConstructorDescriptor.java
new file mode 100644
index 0000000..e576fef
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromTimeConstructorDescriptor.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.constructors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInterval;
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.asterix.om.base.AMutableInterval;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.ADurationParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
+import edu.uci.ics.asterix.om.base.temporal.DurationArithmeticOperations;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class AIntervalStartFromTimeConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-start-from-time", 2);
+    private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new AIntervalStartFromTimeConstructorDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
+        return new ICopyEvaluatorFactory() {
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+                    private String errorMessage = "This cannot be an instance of interval (from Time)";
+
+                    private AMutableInterval aInterval = new AMutableInterval(0L, 0L, (byte) 0);
+                    private AMutableDuration aDuration = new AMutableDuration(0, 0L);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<AInterval> intervalSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.AINTERVAL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    private ByteArrayCharSequenceAccessor charAccessor = new ByteArrayCharSequenceAccessor();
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+
+                        argOut0.reset();
+                        argOut1.reset();
+                        eval0.evaluate(tuple);
+                        eval1.evaluate(tuple);
+
+                        try {
+
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                            } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG
+                                    && argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+                                // start time
+
+                                int stringLength = ((argOut0.getByteArray()[1] & 0xff) << 8)
+                                        + (argOut0.getByteArray()[2] & 0xff);
+
+                                charAccessor.reset(argOut0.getByteArray(), 3, stringLength);
+                                int intervalStart = ATimeParserFactory.parseTimePart(charAccessor);
+
+                                if (intervalStart < 0) {
+                                    intervalStart += GregorianCalendarSystem.CHRONON_OF_DAY;
+                                }
+
+                                // duration
+
+                                stringLength = ((argOut1.getByteArray()[1] & 0xff) << 8)
+                                        + (argOut1.getByteArray()[2] & 0xff);
+
+                                charAccessor.reset(argOut1.getByteArray(), 3, stringLength);
+                                ADurationParserFactory.parseDuration(charAccessor, aDuration);
+
+                                if (aDuration.getMonths() != 0) {
+                                    throw new AlgebricksException("Cannot add a year-month duration to a time value.");
+                                }
+
+                                int intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
+                                        aDuration.getMilliseconds());
+
+                                if (intervalEnd > GregorianCalendarSystem.CHRONON_OF_DAY) {
+
+                                    intervalEnd = intervalEnd - (int) (GregorianCalendarSystem.CHRONON_OF_DAY);
+                                }
+
+                                if (intervalEnd < intervalStart) {
+                                    throw new AlgebricksException(
+                                            "Interval end must not be less than the interval start.");
+                                }
+
+                                aInterval.setValue(intervalStart, intervalEnd, ATypeTag.TIME.serialize());
+                                intervalSerde.serialize(aInterval, out);
+                            } else {
+                                throw new AlgebricksException("Wrong format for interval constructor from times.");
+                            }
+
+                        } catch (IOException e1) {
+                            throw new AlgebricksException(errorMessage);
+                        } catch (Exception e2) {
+                            throw new AlgebricksException(e2);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
\ No newline at end of file
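
The start-from-time constructor rejects year-month durations and wraps an end time that crosses midnight back into the 24-hour range; the committed code additionally rejects intervals whose wrapped end precedes the start. A simplified sketch of the end-time computation, with all names illustrative:

    // Illustrative sketch: start time + day-time duration -> end time, wrapped past midnight.
    public final class TimeIntervalEnd {

        private static final int MILLIS_PER_DAY = 24 * 60 * 60 * 1000;

        public static int endOfInterval(int startChronon, int durationMonths, long durationMillis) {
            if (durationMonths != 0) {
                throw new IllegalArgumentException("Cannot add a year-month duration to a time value.");
            }
            int end = (int) (startChronon + durationMillis);
            if (end > MILLIS_PER_DAY) {
                end -= MILLIS_PER_DAY; // wrap into the following day
            }
            return end;
        }

        public static void main(String[] args) {
            // 23:00:00 plus a two-hour duration wraps to 01:00:00 (3600000 ms).
            System.out.println(endOfInterval(23 * 3600 * 1000, 0, 2L * 3600 * 1000));
        }
    }
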
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ATimeConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ATimeConstructorDescriptor.java
index 1a7eb12..caff78b 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ATimeConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ATimeConstructorDescriptor.java
@@ -21,8 +21,9 @@
 import edu.uci.ics.asterix.om.base.AMutableTime;
 import edu.uci.ics.asterix.om.base.ANull;
 import edu.uci.ics.asterix.om.base.ATime;
-import edu.uci.ics.asterix.om.base.temporal.ADateAndTimeParser;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
 import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
 import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
 import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
 import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
@@ -82,8 +83,15 @@
                             eval.evaluate(tuple);
                             byte[] serString = outInput.getByteArray();
                             if (serString[0] == SER_STRING_TYPE_TAG) {
-                                charAccessor.reset(serString, 3, 0);
-                                int chrononTimeInMs = ADateAndTimeParser.parseTimePart(charAccessor);
+
+                                int stringLength = ((serString[1] & 0xff) << 8) + (serString[2] & 0xff);
+
+                                charAccessor.reset(serString, 3, stringLength);
+                                int chrononTimeInMs = ATimeParserFactory.parseTimePart(charAccessor);
+
+                                if (chrononTimeInMs < 0) {
+                                    chrononTimeInMs += GregorianCalendarSystem.CHRONON_OF_DAY;
+                                }
 
                                 aTime.setValue(chrononTimeInMs);
                                 timeSerde.serialize(aTime, out);
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/OpenRecordConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/OpenRecordConstructorDescriptor.java
index 77869ba..81ae62b 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/OpenRecordConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/OpenRecordConstructorDescriptor.java
@@ -1,9 +1,25 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.asterix.runtime.evaluators.functions;
 
 import java.io.DataOutput;
 import java.io.IOException;
 
 import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
 import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
 import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
@@ -86,8 +102,8 @@
                                 }
                             }
                             recBuilder.write(out, true);
-                        } catch (IOException ioe) {
-                            throw new AlgebricksException(ioe);
+                        } catch (IOException | AsterixException e) {
+                            throw new AlgebricksException(e);
                         }
                     }
                 };
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/YearDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/YearDescriptor.java
deleted file mode 100644
index 107f3cd..0000000
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/YearDescriptor.java
+++ /dev/null
@@ -1,102 +0,0 @@
-package edu.uci.ics.asterix.runtime.evaluators.functions;
-
-import java.io.DataOutput;
-
-import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.AMutableInt32;
-import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
-import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.EnumDeserializer;
-import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
-import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-public class YearDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
-    private static final long serialVersionUID = 1L;
-    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-    private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
-    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
-        public IFunctionDescriptor createFunctionDescriptor() {
-            return new YearDescriptor();
-        }
-    };
-
-    @Override
-    public FunctionIdentifier getIdentifier() {
-        return AsterixBuiltinFunctions.YEAR;
-    }
-
-    /**
-     * Returns the 4-digit representation of a year from a string, as an int32.
-     * e.g. year('2010-10-24') = 2010
-     */
-
-    @Override
-    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
-
-        return new ICopyEvaluatorFactory() {
-            private static final long serialVersionUID = 1L;
-
-            @Override
-            public ICopyEvaluator createEvaluator(IDataOutputProvider output) throws AlgebricksException {
-                final DataOutput out = output.getDataOutput();
-
-                return new ICopyEvaluator() {
-                    private ArrayBackedValueStorage out1 = new ArrayBackedValueStorage();
-                    private ICopyEvaluator eval1 = args[0].createEvaluator(out1);
-                    private AMutableInt32 m = new AMutableInt32(0);
-                    @SuppressWarnings("unchecked")
-                    private ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
-                            .getSerializerDeserializer(BuiltinType.AINT32);
-                    @SuppressWarnings("unchecked")
-                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
-                            .getSerializerDeserializer(BuiltinType.ANULL);
-
-                    @Override
-                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
-                        try {
-                            out1.reset();
-                            eval1.evaluate(tuple);
-                            byte[] dateArray = out1.getByteArray();
-
-                            if (dateArray[0] == SER_NULL_TYPE_TAG) {
-                                nullSerde.serialize(ANull.NULL, out);
-                                return;
-                            }
-
-                            if (dateArray[0] != SER_STRING_TYPE_TAG) {
-                                throw new AlgebricksException("year function can not be called on values of type"
-                                        + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(dateArray[0]));
-                            }
-
-                            int year = (UTF8StringPointable.charAt(dateArray, 3) - '0') * 1000
-                                    + (UTF8StringPointable.charAt(dateArray, 4) - '0') * 100
-                                    + (UTF8StringPointable.charAt(dateArray, 5) - '0') * 10
-                                    + (UTF8StringPointable.charAt(dateArray, 6) - '0');
-                            m.setValue(year);
-
-                            int32Serde.serialize(m, out);
-                        } catch (HyracksDataException e) {
-                            throw new AlgebricksException(e);
-                        }
-                    }
-                };
-            }
-        };
-    }
-
-}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AbstractIntervalLogicFuncDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AbstractIntervalLogicFuncDescriptor.java
new file mode 100644
index 0000000..241e2e0
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AbstractIntervalLogicFuncDescriptor.java
@@ -0,0 +1,117 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AIntervalSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ABoolean;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public abstract class AbstractIntervalLogicFuncDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+
+    // allowed input types
+    private final static byte SER_INTERVAL_TYPE_TAG = ATypeTag.INTERVAL.serialize();
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ABoolean> booleanSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_INTERVAL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] != SER_INTERVAL_TYPE_TAG) {
+                                throw new AlgebricksException("Inapplicable input type for parameters: ("
+                                        + argOut0.getByteArray()[0] + ", " + argOut1.getByteArray()[0] + ")");
+                            }
+
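+                            // offset 1 skips the one-byte type tag; both intervals must be over the same
+                            // inner time type (date, time, or datetime) to be comparable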
+                            if (AIntervalSerializerDeserializer.getIntervalTimeType(argOut0.getByteArray(), 1) != AIntervalSerializerDeserializer
+                                    .getIntervalTimeType(argOut1.getByteArray(), 1)) {
+                                throw new AlgebricksException(
+                                        "Failed to compare two intervals with different internal time types.");
+                            }
+
+                            ABoolean res = (compareIntervals(
+                                    AIntervalSerializerDeserializer.getIntervalStart(argOut0.getByteArray(), 1),
+                                    AIntervalSerializerDeserializer.getIntervalEnd(argOut0.getByteArray(), 1),
+                                    AIntervalSerializerDeserializer.getIntervalStart(argOut1.getByteArray(), 1),
+                                    AIntervalSerializerDeserializer.getIntervalEnd(argOut1.getByteArray(), 1))) ? ABoolean.TRUE
+                                    : ABoolean.FALSE;
+
+                            booleanSerde.serialize(res, out);
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
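+    /**
+     * Compares the two intervals given by their start and end chronons (s1, e1) and (s2, e2);
+     * each concrete descriptor implements one interval relation on top of this hook.
+     */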
+    protected abstract boolean compareIntervals(long s1, long e1, long s2, long e2);
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AddDateDurationDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AddDateDurationDescriptor.java
new file mode 100644
index 0000000..7417f2b
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AddDateDurationDescriptor.java
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADate;
+import edu.uci.ics.asterix.om.base.AMutableDate;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.DurationArithmeticOperations;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class AddDateDurationDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "add-date-duration", 2);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
+    private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new AddDateDurationDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADate> dateSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADATE);
+
+                    private AMutableDate aDate = new AMutableDate(0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_DATE_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 0: expecting a Date ("
+                                                + SER_DATE_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                + "), but got: " + argOut0.getByteArray()[0]);
+                            }
+
+                            if (argOut1.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 1: expecting a Duration ("
+                                                + SER_DURATION_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                + "), but got: " + argOut1.getByteArray()[0]);
+                            }
+
+                            // get duration fields: yearMonth field and dayTime field
+                            int yearMonth = ADurationSerializerDeserializer.getYearMonth(argOut1.getByteArray(), 1);
+                            long dayTime = ADurationSerializerDeserializer.getDayTime(argOut1.getByteArray(), 1);
+
+                            // get date fields
+                            long datetimeChronon = ADateSerializerDeserializer.getChronon(argOut0.getByteArray(), 1)
+                                    * GregorianCalendarSystem.CHRONON_OF_DAY;
+
+                            datetimeChronon = DurationArithmeticOperations.addDuration(datetimeChronon, yearMonth,
+                                    dayTime);
+
+                            int dateChrononInDays = (int) (datetimeChronon / GregorianCalendarSystem.CHRONON_OF_DAY);
+                            if (dateChrononInDays < 0 && datetimeChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0) {
+                                dateChrononInDays -= 1;
+                            }
+
+                            aDate.setValue(dateChrononInDays);
+
+                            dateSerde.serialize(aDate, out);
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AddDatetimeDurationDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AddDatetimeDurationDescriptor.java
new file mode 100644
index 0000000..4f04da3
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AddDatetimeDurationDescriptor.java
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADateTime;
+import edu.uci.ics.asterix.om.base.AMutableDateTime;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.DurationArithmeticOperations;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class AddDatetimeDurationDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "add-datetime-duration", 2);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+    private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new AddDatetimeDurationDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADateTime> datetimeSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADATETIME);
+
+                    private AMutableDateTime aDatetime = new AMutableDateTime(0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_DATETIME_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 0: expecting a DateTime ("
+                                                + SER_DATETIME_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                + "), but got: " + argOut0.getByteArray()[0]);
+                            }
+
+                            if (argOut1.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 1: expecting a Duration ("
+                                                + SER_DURATION_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                + "), but got: " + argOut1.getByteArray()[0]);
+                            }
+
+                            // get duration fields: yearMonth field and dayTime field
+                            int yearMonth = ADurationSerializerDeserializer.getYearMonth(argOut1.getByteArray(), 1);
+                            long dayTime = ADurationSerializerDeserializer.getDayTime(argOut1.getByteArray(), 1);
+
+                            // get date fields
+                            long datetimeChronon = ADateTimeSerializerDeserializer
+                                    .getChronon(argOut0.getByteArray(), 1);
+
+                            datetimeChronon = DurationArithmeticOperations.addDuration(datetimeChronon, yearMonth,
+                                    dayTime);
+
+                            aDatetime.setValue(datetimeChronon);
+
+                            datetimeSerde.serialize(aDatetime, out);
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AddTimeDurationDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AddTimeDurationDescriptor.java
new file mode 100644
index 0000000..e6b6ebd
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AddTimeDurationDescriptor.java
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AMutableTime;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.ATime;
+import edu.uci.ics.asterix.om.base.temporal.DurationArithmeticOperations;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class AddTimeDurationDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "add-time-duration", 2);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
+    private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new AddTimeDurationDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ATime> timeSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ATIME);
+
+                    private AMutableTime aTime = new AMutableTime(0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_TIME_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 0: expecting a Time ("
+                                                + SER_TIME_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                + "), but got: " + argOut0.getByteArray()[0]);
+                            }
+
+                            if (argOut1.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 1: expecting a Duration ("
+                                                + SER_DURATION_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                + "), but got: " + argOut1.getByteArray()[0]);
+                            }
+
+                            // get duration fields: yearMonth field and dayTime field
+                            int yearMonth = ADurationSerializerDeserializer.getYearMonth(argOut1.getByteArray(), 1);
+
+                            // cannot add a year-month duration to a time value
+                            if (yearMonth != 0) {
+                                throw new AlgebricksException("Cannot add a year-month duration to a time value.");
+                            }
+
+                            long dayTime = ADurationSerializerDeserializer.getDayTime(argOut1.getByteArray(), 1);
+
+                            // get time fields
+                            int timeChronon = ATimeSerializerDeserializer.getChronon(argOut0.getByteArray(), 1);
+
+                            timeChronon = DurationArithmeticOperations.addDuration(timeChronon, dayTime);
+
+                            aTime.setValue(timeChronon);
+
+                            timeSerde.serialize(aTime, out);
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AdjustDateTimeForTimeZoneDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AdjustDateTimeForTimeZoneDescriptor.java
new file mode 100644
index 0000000..fb01e97
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AdjustDateTimeForTimeZoneDescriptor.java
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem.Fields;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class AdjustDateTimeForTimeZoneDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "adjust-datetime-for-timezone", 2);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+    private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new AdjustDateTimeForTimeZoneDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    private ByteArrayCharSequenceAccessor charAccessor = new ByteArrayCharSequenceAccessor();
+
+                    private GregorianCalendarSystem calInstance = GregorianCalendarSystem.getInstance();
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_DATETIME_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 0: expecting a Datetime, but got: "
+                                                + argOut0.getByteArray()[0]);
+                            }
+
+                            if (argOut1.getByteArray()[0] != SER_STRING_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 1: expecting a String, but got: "
+                                                + argOut1.getByteArray()[0]);
+                            }
+
+                            // the two bytes after the type tag hold the UTF-8 byte length of the string argument
+                            int stringLength = ((argOut1.getByteArray()[1] & 0xff) << 8)
+                                    + (argOut1.getByteArray()[2] & 0xff);
+
+                            charAccessor.reset(argOut1.getByteArray(), 3, stringLength);
+
+                            int timezone = ATimeParserFactory.parseTimezonePart(charAccessor, 0);
+
+                            if (!calInstance.validateTimeZone(timezone)) {
+                                throw new AlgebricksException("Wrong format for a time zone string!");
+                            }
+
+                            long chronon = ADateTimeSerializerDeserializer.getChronon(argOut0.getByteArray(), 1);
+
+                            chronon = calInstance.adjustChrononByTimezone(chronon, timezone);
+
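+                            // render the adjusted instant (year through millisecond) together with the
+                            // explicit timezone, and emit it as a tagged UTF-8 string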
+                            StringBuilder sbder = new StringBuilder();
+
+                            calInstance.getExtendStringRepWithTimezoneUntilField(chronon, timezone, sbder, Fields.YEAR,
+                                    Fields.MILLISECOND);
+
+                            out.writeByte(SER_STRING_TYPE_TAG);
+                            out.writeUTF(sbder.toString());
+
+                        } catch (Exception e1) {
+                            throw new AlgebricksException(e1);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AdjustTimeForTimeZoneDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AdjustTimeForTimeZoneDescriptor.java
new file mode 100644
index 0000000..1323664
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AdjustTimeForTimeZoneDescriptor.java
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem.Fields;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class AdjustTimeForTimeZoneDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "adjust-time-for-timezone", 2);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
+    private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new AdjustTimeForTimeZoneDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    private ByteArrayCharSequenceAccessor charAccessor = new ByteArrayCharSequenceAccessor();
+
+                    private GregorianCalendarSystem calInstance = GregorianCalendarSystem.getInstance();
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_TIME_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 0: expecting a Time, but got: "
+                                                + argOut0.getByteArray()[0]);
+                            }
+
+                            if (argOut1.getByteArray()[0] != SER_STRING_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 1: expecting a String, but got: "
+                                                + argOut1.getByteArray()[0]);
+                            }
+
+                            // the two bytes after the type tag hold the UTF-8 byte length of the string argument
+                            int stringLength = ((argOut1.getByteArray()[1] & 0xff) << 8)
+                                    + (argOut1.getByteArray()[2] & 0xff);
+
+                            charAccessor.reset(argOut1.getByteArray(), 3, stringLength);
+
+                            int timezone = ATimeParserFactory.parseTimezonePart(charAccessor, 0);
+
+                            if (!calInstance.validateTimeZone(timezone)) {
+                                throw new AlgebricksException("Wrong format for a time zone string!");
+                            }
+
+                            int chronon = ATimeSerializerDeserializer.getChronon(argOut0.getByteArray(), 1);
+
+                            chronon = (int) calInstance.adjustChrononByTimezone(chronon, timezone);
+
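+                            // render the adjusted time (hour through millisecond) together with the
+                            // explicit timezone, and emit it as a tagged UTF-8 string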
+                            StringBuilder sbder = new StringBuilder();
+
+                            calInstance.getExtendStringRepWithTimezoneUntilField(chronon, timezone, sbder, Fields.HOUR,
+                                    Fields.MILLISECOND);
+
+                            out.writeByte(SER_STRING_TYPE_TAG);
+                            out.writeUTF(sbder.toString());
+
+                        } catch (Exception e1) {
+                            throw new AlgebricksException(e1);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CalendarDuartionFromDateDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CalendarDuartionFromDateDescriptor.java
new file mode 100644
index 0000000..c6be030
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CalendarDuartionFromDateDescriptor.java
@@ -0,0 +1,235 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADuration;
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.DurationArithmeticOperations;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class CalendarDuartionFromDateDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "calendar-duration-from-date", 2);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
+    private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new CalendarDuartionFromDateDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADuration> durationSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADURATION);
+
+                    private AMutableDuration aDuration = new AMutableDuration(0, 0);
+
+                    private GregorianCalendarSystem calInstanct = GregorianCalendarSystem.getInstance();
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_DATE_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 0: expecting a Date, but got: "
+                                                + argOut0.getByteArray()[0]);
+                            }
+
+                            if (argOut1.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 1: expecting a Duration, but got: "
+                                                + argOut1.getByteArray()[0]);
+                            }
+
+                            int yearMonthDurationInMonths = ADurationSerializerDeserializer.getYearMonth(
+                                    argOut1.getByteArray(), 1);
+                            long dayTimeDurationInMs = ADurationSerializerDeserializer.getDayTime(
+                                    argOut1.getByteArray(), 1);
+
+                            long startingTimePoint = ADateSerializerDeserializer.getChronon(argOut0.getByteArray(), 1)
+                                    * GregorianCalendarSystem.CHRONON_OF_DAY;
+
+                            long endingTimePoint = DurationArithmeticOperations.addDuration(startingTimePoint,
+                                    yearMonthDurationInMonths, dayTimeDurationInMs);
+
+                            if (startingTimePoint == endingTimePoint) {
+                                aDuration.setValue(0, 0);
+                            } else {
+
+                                boolean negative = false;
+
+                                if (endingTimePoint < startingTimePoint) {
+                                    negative = true;
+                                    // swap the starting and ending time, so that ending time is always larger than the starting time.
+                                    long tmpTime = endingTimePoint;
+                                    endingTimePoint = startingTimePoint;
+                                    startingTimePoint = tmpTime;
+                                }
+
+                                int year0 = calInstanct.getYear(startingTimePoint);
+                                int month0 = calInstanct.getMonthOfYear(startingTimePoint, year0);
+
+                                int year1 = calInstanct.getYear(endingTimePoint);
+                                int month1 = calInstanct.getMonthOfYear(endingTimePoint, year1);
+
+                                int year = year1 - year0;
+                                int month = month1 - month0;
+                                int day = calInstanct.getDayOfMonthYear(endingTimePoint, year1, month1)
+                                        - calInstanct.getDayOfMonthYear(startingTimePoint, year0, month0);
+                                int hour = calInstanct.getHourOfDay(endingTimePoint)
+                                        - calInstanct.getHourOfDay(startingTimePoint);
+                                int min = calInstanct.getMinOfHour(endingTimePoint)
+                                        - calInstanct.getMinOfHour(startingTimePoint);
+                                int sec = calInstanct.getSecOfMin(endingTimePoint)
+                                        - calInstanct.getSecOfMin(startingTimePoint);
+                                int ms = calInstanct.getMillisOfSec(endingTimePoint)
+                                        - calInstanct.getMillisOfSec(startingTimePoint);
+
+                                if (ms < 0) {
+                                    ms += GregorianCalendarSystem.CHRONON_OF_SECOND;
+                                    sec -= 1;
+                                }
+
+                                if (sec < 0) {
+                                    sec += GregorianCalendarSystem.CHRONON_OF_MINUTE
+                                            / GregorianCalendarSystem.CHRONON_OF_SECOND;
+                                    min -= 1;
+                                }
+
+                                if (min < 0) {
+                                    min += GregorianCalendarSystem.CHRONON_OF_HOUR
+                                            / GregorianCalendarSystem.CHRONON_OF_MINUTE;
+                                    hour -= 1;
+                                }
+
+                                if (hour < 0) {
+                                    hour += GregorianCalendarSystem.CHRONON_OF_DAY
+                                            / GregorianCalendarSystem.CHRONON_OF_HOUR;
+                                    day -= 1;
+                                }
+
+                                if (day < 0) {
+                                    boolean isLeapYear = calInstanct.isLeapYear(year0);
+                                    day += (isLeapYear) ? (GregorianCalendarSystem.DAYS_OF_MONTH_LEAP[month0 - 1])
+                                            : (GregorianCalendarSystem.DAYS_OF_MONTH_ORDI[month0 - 1]);
+                                    month -= 1;
+                                }
+
+                                if (month < 0) {
+                                    month += GregorianCalendarSystem.MONTHS_IN_A_YEAR;
+                                    year -= 1;
+                                }
+
+                                if (negative) {
+                                    aDuration.setValue(-1 * (year * GregorianCalendarSystem.MONTHS_IN_A_YEAR + month),
+                                            -1
+                                                    * (day * GregorianCalendarSystem.CHRONON_OF_DAY + hour
+                                                            * GregorianCalendarSystem.CHRONON_OF_HOUR + min
+                                                            * GregorianCalendarSystem.CHRONON_OF_MINUTE + sec
+                                                            * GregorianCalendarSystem.CHRONON_OF_SECOND + ms));
+                                } else {
+                                    aDuration.setValue(year * GregorianCalendarSystem.MONTHS_IN_A_YEAR + month, day
+                                            * GregorianCalendarSystem.CHRONON_OF_DAY + hour
+                                            * GregorianCalendarSystem.CHRONON_OF_HOUR + min
+                                            * GregorianCalendarSystem.CHRONON_OF_MINUTE + sec
+                                            * GregorianCalendarSystem.CHRONON_OF_SECOND + ms);
+                                }
+                            }
+
+                            durationSerde.serialize(aDuration, out);
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
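For intuition, here is a minimal standalone sketch (not part of this patch) of the borrow cascade the evaluator above uses to normalize the field-by-field difference: whenever a lower field comes out negative, one unit is borrowed from the next larger field. The field values and the month length below are illustrative only.

// Standalone sketch of the borrow cascade (illustrative values only).
public class BorrowCascadeSketch {
    public static void main(String[] args) {
        // hypothetical field-wise difference: +2 months, -3 days, +5 hours
        int month = 2, day = -3, hour = 5;
        int daysInBorrowedMonth = 31; // assumed length of the month being borrowed from

        if (hour < 0) {
            hour += 24;
            day -= 1;
        }
        if (day < 0) {
            day += daysInBorrowedMonth;
            month -= 1;
        }

        // prints: 1 months, 28 days, 5 hours
        System.out.println(month + " months, " + day + " days, " + hour + " hours");
    }
}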
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CalendarDurationFromDateTimeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CalendarDurationFromDateTimeDescriptor.java
new file mode 100644
index 0000000..2c7a9a7
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CalendarDurationFromDateTimeDescriptor.java
@@ -0,0 +1,251 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADuration;
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.DurationArithmeticOperations;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * This function converts a given duration into a "human-readable" duration containing both year-month and day-time
+ * duration parts, by re-organizing values between the duration fields based on the given reference time point.
+ * <p/>
+ * The basic algorithm for this conversion is simple: <br/>
+ * 1. Calculate the ending time point by adding the given duration to the given reference time point;<br/>
+ * 2. Calculate the field-by-field differences between the two time points;<br/>
+ * 3. Re-format the differences into a human-readable duration.
+ * <p/>
+ * Here "human-readable" means that the value of each field of the duration is within the value range of that field in
+ * the calendar system. For example, month would be in [0, 12), and hour would be in [0, 24).
+ * <p/>
+ * The result can be considered a "field-based" difference between the two datetime values, but with all negative
+ * field values normalized to be non-negative.
+ * <p/>
+ * In the implementation, we always subtract from the later time point, so the intermediate difference is always positive.
+ * <p/>
+ */
+public class CalendarDurationFromDateTimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "calendar-duration-from-datetime", 2);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+    private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new CalendarDurationFromDateTimeDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADuration> durationSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADURATION);
+
+                    private AMutableDuration aDuration = new AMutableDuration(0, 0);
+
+                    private GregorianCalendarSystem calInstanct = GregorianCalendarSystem.getInstance();
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_DATETIME_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 0: expecting ADateTime, but got: "
+                                                + argOut0.getByteArray()[0]);
+                            }
+
+                            if (argOut1.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 1: expecting ADuration, but got: "
+                                                + argOut1.getByteArray()[0]);
+                            }
+
+                            int yearMonthDurationInMonths = ADurationSerializerDeserializer.getYearMonth(
+                                    argOut1.getByteArray(), 1);
+                            long dayTimeDurationInMs = ADurationSerializerDeserializer.getDayTime(
+                                    argOut1.getByteArray(), 1);
+
+                            long startingTimePoint = ADateTimeSerializerDeserializer.getChronon(argOut0.getByteArray(),
+                                    1);
+
+                            long endingTimePoint = DurationArithmeticOperations.addDuration(startingTimePoint,
+                                    yearMonthDurationInMonths, dayTimeDurationInMs);
+
+                            if (startingTimePoint == endingTimePoint) {
+                                aDuration.setValue(0, 0);
+                            } else {
+
+                                boolean negative = false;
+
+                                if (endingTimePoint < startingTimePoint) {
+                                    negative = true;
+                                    // swap the starting and ending time, so that ending time is always larger than the starting time.
+                                    long tmpTime = endingTimePoint;
+                                    endingTimePoint = startingTimePoint;
+                                    startingTimePoint = tmpTime;
+                                }
+
+                                int year0 = calInstanct.getYear(startingTimePoint);
+                                int month0 = calInstanct.getMonthOfYear(startingTimePoint, year0);
+
+                                int year1 = calInstanct.getYear(endingTimePoint);
+                                int month1 = calInstanct.getMonthOfYear(endingTimePoint, year1);
+
+                                int year = year1 - year0;
+                                int month = month1 - month0;
+                                int day = calInstanct.getDayOfMonthYear(endingTimePoint, year1, month1)
+                                        - calInstanct.getDayOfMonthYear(startingTimePoint, year0, month0);
+                                int hour = calInstanct.getHourOfDay(endingTimePoint)
+                                        - calInstanct.getHourOfDay(startingTimePoint);
+                                int min = calInstanct.getMinOfHour(endingTimePoint)
+                                        - calInstanct.getMinOfHour(startingTimePoint);
+                                int sec = calInstanct.getSecOfMin(endingTimePoint)
+                                        - calInstanct.getSecOfMin(startingTimePoint);
+                                int ms = calInstanct.getMillisOfSec(endingTimePoint)
+                                        - calInstanct.getMillisOfSec(startingTimePoint);
+
+                                if (ms < 0) {
+                                    ms += GregorianCalendarSystem.CHRONON_OF_SECOND;
+                                    sec -= 1;
+                                }
+
+                                if (sec < 0) {
+                                    sec += GregorianCalendarSystem.CHRONON_OF_MINUTE
+                                            / GregorianCalendarSystem.CHRONON_OF_SECOND;
+                                    min -= 1;
+                                }
+
+                                if (min < 0) {
+                                    min += GregorianCalendarSystem.CHRONON_OF_HOUR
+                                            / GregorianCalendarSystem.CHRONON_OF_MINUTE;
+                                    hour -= 1;
+                                }
+
+                                if (hour < 0) {
+                                    hour += GregorianCalendarSystem.CHRONON_OF_DAY
+                                            / GregorianCalendarSystem.CHRONON_OF_HOUR;
+                                    day -= 1;
+                                }
+
+                                if (day < 0) {
+                                    boolean isLeapYear = calInstanct.isLeapYear(year0);
+                                    day += (isLeapYear) ? (GregorianCalendarSystem.DAYS_OF_MONTH_LEAP[month0 - 1])
+                                            : (GregorianCalendarSystem.DAYS_OF_MONTH_ORDI[month0 - 1]);
+                                    month -= 1;
+                                }
+
+                                if (month < 0) {
+                                    month += GregorianCalendarSystem.MONTHS_IN_A_YEAR;
+                                    year -= 1;
+                                }
+
+                                if (negative) {
+                                    aDuration.setValue(-1 * (year * GregorianCalendarSystem.MONTHS_IN_A_YEAR + month),
+                                            -1
+                                                    * (day * GregorianCalendarSystem.CHRONON_OF_DAY + hour
+                                                            * GregorianCalendarSystem.CHRONON_OF_HOUR + min
+                                                            * GregorianCalendarSystem.CHRONON_OF_MINUTE + sec
+                                                            * GregorianCalendarSystem.CHRONON_OF_SECOND + ms));
+                                } else {
+                                    aDuration.setValue(year * GregorianCalendarSystem.MONTHS_IN_A_YEAR + month, day
+                                            * GregorianCalendarSystem.CHRONON_OF_DAY + hour
+                                            * GregorianCalendarSystem.CHRONON_OF_HOUR + min
+                                            * GregorianCalendarSystem.CHRONON_OF_MINUTE + sec
+                                            * GregorianCalendarSystem.CHRONON_OF_SECOND + ms);
+                                }
+                            }
+
+                            durationSerde.serialize(aDuration, out);
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
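The three steps described in the class javadoc can be seen in miniature with java.time, used here purely as an illustration: it is not what the descriptor uses, and its month-clamping rules may differ from GregorianCalendarSystem in edge cases.

import java.time.Duration;
import java.time.LocalDateTime;
import java.time.Period;

public class CalendarDurationSketch {
    public static void main(String[] args) {
        LocalDateTime start = LocalDateTime.of(2012, 1, 31, 12, 0);
        // 1. add the given duration to the reference time point
        LocalDateTime end = start.plusMonths(1).plusDays(1).plusHours(1);
        // 2. field-by-field difference (valid here because end's time-of-day is not before start's)
        Period yearMonthPart = Period.between(start.toLocalDate(), end.toLocalDate());
        Duration dayTimePart = Duration.between(start.toLocalTime(), end.toLocalTime());
        // 3. each field is already within its calendar range ("human-readable")
        System.out.println(yearMonthPart + " " + dayTimePart); // P1M1D PT1H
    }
}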
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CurrentDateDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CurrentDateDescriptor.java
new file mode 100644
index 0000000..0cb47b5
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CurrentDateDescriptor.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADate;
+import edu.uci.ics.asterix.om.base.AMutableDate;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class CurrentDateDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    private final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "current-date",
+            0);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new CurrentDateDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADate> dateSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADATE);
+                    private AMutableDate aDate = new AMutableDate(0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        try {
+                            int dateChronon = (int) (System.currentTimeMillis() / GregorianCalendarSystem.CHRONON_OF_DAY);
+                            aDate.setValue(dateChronon);
+                            dateSerde.serialize(aDate, out);
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
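As a small standalone sketch (not part of this patch): the date chronon used above is the number of whole days since the Unix epoch, and the matching time chronon used by current-time is the millisecond offset into the current UTC day.

public class CurrentChrononSketch {
    // assumed to match GregorianCalendarSystem.CHRONON_OF_DAY (milliseconds per day)
    private static final long MS_PER_DAY = 24L * 60 * 60 * 1000;

    public static void main(String[] args) {
        long nowMs = System.currentTimeMillis();
        int dateChronon = (int) (nowMs / MS_PER_DAY); // whole days since 1970-01-01 (UTC)
        int timeChronon = (int) (nowMs % MS_PER_DAY); // milliseconds into the current UTC day
        System.out.println("date chronon = " + dateChronon + ", time chronon = " + timeChronon);
    }
}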
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CurrentDateTimeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CurrentDateTimeDescriptor.java
new file mode 100644
index 0000000..9209cae
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CurrentDateTimeDescriptor.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADateTime;
+import edu.uci.ics.asterix.om.base.AMutableDateTime;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class CurrentDateTimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    private final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "current-datetime", 0);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new CurrentDateTimeDescriptor();
+        }
+    };
+
+    private CurrentDateTimeDescriptor() {
+    }
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADateTime> datetimeSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADATETIME);
+                    private AMutableDateTime aDateTime = new AMutableDateTime(0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        try {
+                            aDateTime.setValue(System.currentTimeMillis());
+                            datetimeSerde.serialize(aDateTime, out);
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CurrentTimeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CurrentTimeDescriptor.java
new file mode 100644
index 0000000..c7078b9
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CurrentTimeDescriptor.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AMutableTime;
+import edu.uci.ics.asterix.om.base.ATime;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class CurrentTimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "current-time", 0);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new CurrentTimeDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ATime> timeSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ATIME);
+                    private AMutableTime aTime = new AMutableTime(0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        try {
+                            int timeChronon = (int) (System.currentTimeMillis() % GregorianCalendarSystem.CHRONON_OF_DAY);
+                            aTime.setValue(timeChronon);
+                            timeSerde.serialize(aTime, out);
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DateFromDatetimeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DateFromDatetimeDescriptor.java
new file mode 100644
index 0000000..4ef52d9
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DateFromDatetimeDescriptor.java
@@ -0,0 +1,108 @@
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADate;
+import edu.uci.ics.asterix.om.base.AMutableDate;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class DateFromDatetimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "date-from-datetime", 1);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new DateFromDatetimeDescriptor();
+        }
+
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+                    // possible returning types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADate> dateSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADATE);
+                    private AMutableDate aDate = new AMutableDate(0);
+
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut.reset();
+                        eval.evaluate(tuple);
+                        try {
+                            if (argOut.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                            } else {
+                                if (argOut.getByteArray()[0] != SER_DATETIME_TYPE_TAG) {
+                                    throw new AlgebricksException(
+                                            "Inapplicable input type for function date-from-datetime: expecting ADateTime ("
+                                                    + SER_DATETIME_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                    + "), but got: " + argOut.getByteArray()[0]);
+                                }
+                                long datetimeChronon = ADateTimeSerializerDeserializer.getChronon(
+                                        argOut.getByteArray(), 1);
+                                int dateChrononInDays = (int) (datetimeChronon / GregorianCalendarSystem.CHRONON_OF_DAY);
+                                if (datetimeChronon < 0
+                                        && datetimeChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0) {
+                                    dateChrononInDays -= 1;
+                                }
+                                aDate.setValue(dateChrononInDays);
+                                dateSerde.serialize(aDate, out);
+                            }
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
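The floor correction above exists because Java integer division truncates toward zero, while the date chronon must be the floor of the day that contains the datetime. A minimal standalone sketch (not part of this patch) of that correction:

public class FloorToDaySketch {
    private static final long MS_PER_DAY = 24L * 60 * 60 * 1000; // assumed CHRONON_OF_DAY

    static int dateChronon(long datetimeChronon) {
        int days = (int) (datetimeChronon / MS_PER_DAY); // truncates toward zero
        if (datetimeChronon < 0 && datetimeChronon % MS_PER_DAY != 0) {
            days -= 1; // floor instead of truncate for negative chronons
        }
        return days;
    }

    public static void main(String[] args) {
        System.out.println(dateChronon(0));           // 0  -> 1970-01-01
        System.out.println(dateChronon(-1));          // -1 -> 1969-12-31
        System.out.println(dateChronon(-MS_PER_DAY)); // -1 -> 1969-12-31
    }
}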
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DateFromUnixTimeInDaysDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DateFromUnixTimeInDaysDescriptor.java
new file mode 100644
index 0000000..6b78a35
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DateFromUnixTimeInDaysDescriptor.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADate;
+import edu.uci.ics.asterix.om.base.AMutableDate;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class DateFromUnixTimeInDaysDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "date-from-unix-time-in-days", 1);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new DateFromUnixTimeInDaysDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+                    // allowed input types
+                    private byte serNullTypeTag = ATypeTag.NULL.serialize();
+                    private byte serInt8TypeTag = ATypeTag.INT8.serialize();
+                    private byte serInt16TypeTag = ATypeTag.INT16.serialize();
+                    private byte serInt32TypeTag = ATypeTag.INT32.serialize();
+
+                    private AMutableDate aDate = new AMutableDate(0);
+
+                    // possible returning types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADate> dateSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADATE);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut.reset();
+                        eval.evaluate(tuple);
+                        try {
+                            if (argOut.getByteArray()[0] == serNullTypeTag) {
+                                nullSerde.serialize(ANull.NULL, out);
+                            } else {
+                                if (argOut.getByteArray()[0] == serInt8TypeTag) {
+                                    aDate.setValue(AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1));
+                                } else if (argOut.getByteArray()[0] == serInt16TypeTag) {
+                                    aDate.setValue(AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1));
+                                } else if (argOut.getByteArray()[0] == serInt32TypeTag) {
+                                    aDate.setValue(AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1));
+                                } else {
+                                    throw new AlgebricksException(
+                                            "Inapplicable input type for function date-from-unix-time-in-days: expecting an integer or null, but got "
+                                                    + argOut.getByteArray()[0]);
+                                }
+                                dateSerde.serialize(aDate, out);
+                            }
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
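The dispatch on the first byte, seen in every evaluator in this patch, relies on the tagged value layout: byte 0 carries the serialized ATypeTag, and the typed payload starts at offset 1 (hence the offset-1 arguments to the serde getters). A standalone sketch with hypothetical tag constants, not the real ATypeTag byte values:

public class TypeTagDispatchSketch {
    // hypothetical tag bytes, standing in for ATypeTag.XXX.serialize()
    static final byte TAG_NULL = 0, TAG_INT8 = 1, TAG_INT16 = 2, TAG_INT32 = 3;

    static String describe(byte[] taggedValue) {
        switch (taggedValue[0]) {
            case TAG_NULL:
                return "null";
            case TAG_INT8:
                return "int8 payload at offset 1";
            case TAG_INT16:
                return "int16 payload at offset 1";
            case TAG_INT32:
                return "int32 payload at offset 1";
            default:
                throw new IllegalArgumentException("unexpected type tag: " + taggedValue[0]);
        }
    }

    public static void main(String[] args) {
        System.out.println(describe(new byte[] { TAG_INT32, 0, 0, 0, 7 }));
    }
}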
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DatetimeFromDateAndTimeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DatetimeFromDateAndTimeDescriptor.java
new file mode 100644
index 0000000..2ade4b7
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DatetimeFromDateAndTimeDescriptor.java
@@ -0,0 +1,144 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADateTime;
+import edu.uci.ics.asterix.om.base.AMutableDateTime;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class DatetimeFromDateAndTimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "datetime-from-date-time", 2);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
+    private final static byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new DatetimeFromDateAndTimeDescriptor();
+        }
+
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible returning types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADateTime> datetimeSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADATETIME);
+                    private AMutableDateTime aDateTime = new AMutableDateTime(0);
+
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                            } else {
+                                if (argOut0.getByteArray()[0] != SER_DATE_TYPE_TAG) {
+                                    throw new AlgebricksException(
+                                            "Inapplicable input type for function datetime-from-date-time: expecting a Date ("
+                                                    + SER_DATE_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                    + ") for the first parameter, but got: "
+                                                    + argOut0.getByteArray()[0]);
+                                }
+
+                                if (argOut1.getByteArray()[0] != SER_TIME_TYPE_TAG) {
+                                    throw new AlgebricksException(
+                                            "Inapplicable input type for function datetime-from-date-time: expecting a Time ("
+                                                    + SER_TIME_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                    + ") for the second parameter, but got: "
+                                                    + argOut1.getByteArray()[0]);
+                                }
+
+                                long datetimeChronon = ADateSerializerDeserializer
+                                        .getChronon(argOut0.getByteArray(), 1)
+                                        * GregorianCalendarSystem.CHRONON_OF_DAY
+                                        + ATimeSerializerDeserializer.getChronon(argOut1.getByteArray(), 1);
+
+                                aDateTime.setValue(datetimeChronon);
+                                datetimeSerde.serialize(aDateTime, out);
+                            }
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
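The chronon composition above is simply days-to-milliseconds plus the millisecond offset within the day; a minimal standalone sketch (not part of this patch) with illustrative values:

public class DatetimeCompositionSketch {
    private static final long MS_PER_DAY = 24L * 60 * 60 * 1000; // assumed CHRONON_OF_DAY

    public static void main(String[] args) {
        int dateChronon = 15706;               // illustrative date value (days since epoch)
        int timeChronon = 12 * 60 * 60 * 1000; // 12:00:00.000 as ms into the day
        long datetimeChronon = dateChronon * MS_PER_DAY + timeChronon;
        System.out.println(datetimeChronon);   // 1357041600000
    }
}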
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DatetimeFromUnixTimeInMsDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DatetimeFromUnixTimeInMsDescriptor.java
new file mode 100644
index 0000000..dbd34f2
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DatetimeFromUnixTimeInMsDescriptor.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADateTime;
+import edu.uci.ics.asterix.om.base.AMutableDateTime;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class DatetimeFromUnixTimeInMsDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "datetime-from-unix-time-in-ms", 1);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_INT8_TYPE_TAG = ATypeTag.INT8.serialize();
+    private final static byte SER_INT16_TYPE_TAG = ATypeTag.INT16.serialize();
+    private final static byte SER_INT32_TYPE_TAG = ATypeTag.INT32.serialize();
+    private final static byte SER_INT64_TYPE_TAG = ATypeTag.INT64.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new DatetimeFromUnixTimeInMsDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADateTime> datetimeSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADATETIME);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    private AMutableDateTime aDatetime = new AMutableDateTime(0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut.reset();
+                        eval.evaluate(tuple);
+                        try {
+                            if (argOut.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                            } else {
+                                if (argOut.getByteArray()[0] == SER_INT8_TYPE_TAG) {
+                                    aDatetime.setValue(AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1));
+                                } else if (argOut.getByteArray()[0] == SER_INT16_TYPE_TAG) {
+                                    aDatetime.setValue(AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1));
+                                } else if (argOut.getByteArray()[0] == SER_INT32_TYPE_TAG) {
+                                    aDatetime.setValue(AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1));
+                                } else if (argOut.getByteArray()[0] == SER_INT64_TYPE_TAG) {
+                                    aDatetime.setValue(AInt64SerializerDeserializer.getLong(argOut.getByteArray(), 1));
+                                } else {
+                                    throw new AlgebricksException(
+                                            "Inapplicable input type for function datetime-from-unix-time-in-ms: expecting integer or null type, but got "
+                                                    + argOut.getByteArray()[0]);
+                                }
+                                datetimeSerde.serialize(aDatetime, out);
+                            }
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
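The evaluator above accepts any of the four integer widths and treats the value directly as the datetime chronon, i.e. milliseconds since the Unix epoch. Below is a standalone sketch of the width-dependent read it performs, assuming the big-endian layout the AInt*SerializerDeserializer helpers appear to use; the TAG_* constants are hypothetical stand-ins for the serialized ATypeTag bytes.

import java.nio.ByteBuffer;

// Sketch of reading an 8/16/32/64-bit integer as a millisecond chronon.
// TAG_* are hypothetical tag bytes; big-endian encoding is assumed.
public class UnixTimeWideningSketch {

    static final byte TAG_INT8 = 1, TAG_INT16 = 2, TAG_INT32 = 3, TAG_INT64 = 4;

    static long readChronon(byte tag, byte[] data, int offset) {
        ByteBuffer buf = ByteBuffer.wrap(data); // big-endian by default
        switch (tag) {
            case TAG_INT8:
                return data[offset];
            case TAG_INT16:
                return buf.getShort(offset);
            case TAG_INT32:
                return buf.getInt(offset);
            case TAG_INT64:
                return buf.getLong(offset);
            default:
                throw new IllegalArgumentException("expecting an integer type, got tag " + tag);
        }
    }

    public static void main(String[] args) {
        byte[] int64 = ByteBuffer.allocate(8).putLong(1357034400000L).array();
        System.out.println(readChronon(TAG_INT64, int64, 0)); // 1357034400000
    }
}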
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalAfterDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalAfterDescriptor.java
new file mode 100644
index 0000000..85bb378
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalAfterDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalAfterDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-after",
+            2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalAfterDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AbstractIntervalLogicFuncDescriptor#compareIntervals(long, long, long, long)
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.after(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalBeforeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalBeforeDescriptor.java
new file mode 100644
index 0000000..2da48ee
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalBeforeDescriptor.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalBeforeDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-before", 2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalBeforeDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.before(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalCoveredByDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalCoveredByDescriptor.java
new file mode 100644
index 0000000..1064e59
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalCoveredByDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalCoveredByDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-covered-by", 2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalCoveredByDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AbstractIntervalLogicFuncDescriptor#compareIntervals(long, long, long, long)
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.coveredBy(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalCoversDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalCoversDescriptor.java
new file mode 100644
index 0000000..5b1cfa5
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalCoversDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalCoversDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-covers", 2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalCoversDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AbstractIntervalLogicFuncDescriptor#compareIntervals(long, long, long, long)
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.covers(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalEndedByDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalEndedByDescriptor.java
new file mode 100644
index 0000000..4610c89
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalEndedByDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalEndedByDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-ended-by", 2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalEndedByDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AbstractIntervalLogicFuncDescriptor#compareIntervals(long, long, long, long)
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.endedBy(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalEndsDecriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalEndsDecriptor.java
new file mode 100644
index 0000000..9853c62
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalEndsDecriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalEndsDecriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-ends",
+            2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalEndsDecriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AbstractIntervalLogicFuncDescriptor#compareIntervals(long, long, long, long)
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.ends(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalLogic.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalLogic.java
new file mode 100644
index 0000000..e8e814f
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalLogic.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+public class IntervalLogic {
+
+    public static <T extends Comparable<T>> boolean validateInterval(T s, T e) {
+        return s.compareTo(e) <= 0;
+    }
+
+    /**
+     * Anything from interval 1 is less than anything from interval 2.
+     * <p/>
+     * |------|<br/>
+     * &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;|------|<br/>
+     * 
+     * @param s1
+     * @param e1
+     * @param s2
+     * @param e2
+     * @return
+     */
+    public static <T extends Comparable<T>> boolean before(T s1, T e1, T s2, T e2) {
+        return e1.compareTo(s2) < 0;
+    }
+
+    public static <T extends Comparable<T>> boolean after(T s1, T e1, T s2, T e2) {
+        return before(s2, e2, s1, e1);
+    }
+
+    /**
+     * The end of interval 1 is the same as the start of interval 2.
+     * <p/>
+     * |------|<br/>
+     * &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;|------|<br/>
+     * 
+     * @param s1
+     * @param e1
+     * @param s2
+     * @param e2
+     * @return
+     */
+    public static <T extends Comparable<T>> boolean meets(T s1, T e1, T s2, T e2) {
+        return e1.compareTo(s2) == 0;
+    }
+
+    public static <T extends Comparable<T>> boolean metBy(T s1, T e1, T s2, T e2) {
+        return meets(s2, e2, s1, e1);
+    }
+
+    /**
+     * Interval 1 starts before interval 2 and its end falls strictly inside interval 2.
+     * <p/>
+     * |------|<br/>
+     * &nbsp;&nbsp;&nbsp;&nbsp;|------|<br/>
+     * 
+     * @param s1
+     * @param e1
+     * @param s2
+     * @param e2
+     * @return
+     */
+    public static <T extends Comparable<T>> boolean overlaps(T s1, T e1, T s2, T e2) {
+        return s1.compareTo(s2) < 0 && e1.compareTo(s2) > 0 && e2.compareTo(e1) > 0;
+    }
+
+    public static <T extends Comparable<T>> boolean overlappedBy(T s1, T e1, T s2, T e2) {
+        return overlaps(s2, e2, s1, e1);
+    }
+
+    /**
+     * Each interval starts strictly before the other one ends (symmetric overlap in either order).
+     * <p/>
+     * 
+     * @param s1
+     * @param e1
+     * @param s2
+     * @param e2
+     * @return
+     */
+    public static <T extends Comparable<T>> boolean overlap(T s1, T e1, T s2, T e2) {
+        return s1.compareTo(e2) < 0 && s2.compareTo(e1) < 0;
+    }
+
+    /**
+     * The two intervals start at the same point and interval 1 ends no later than interval 2.
+     * <p/>
+     * |------|<br/>
+     * |-------|<br/>
+     * 
+     * @param s1
+     * @param e1
+     * @param s2
+     * @param e2
+     * @return
+     */
+    public static <T extends Comparable<T>> boolean starts(T s1, T e1, T s2, T e2) {
+        return s1.compareTo(s2) == 0 && e1.compareTo(e2) <= 0;
+    }
+
+    public static <T extends Comparable<T>> boolean startedBy(T s1, T e1, T s2, T e2) {
+        return starts(s2, e2, s1, e1);
+    }
+
+    /**
+     * Anything from interval 2 is in interval 1.
+     * <p/>
+     * |------|<br/>
+     * &nbsp;&nbsp;|----|<br/>
+     * 
+     * @param s1
+     * @param e1
+     * @param s2
+     * @param e2
+     * @return
+     */
+    public static <T extends Comparable<T>> boolean covers(T s1, T e1, T s2, T e2) {
+        return s1.compareTo(s2) <= 0 && e1.compareTo(e2) >= 0;
+    }
+
+    public static <T extends Comparable<T>> boolean coveredBy(T s1, T e1, T s2, T e2) {
+        return covers(s2, e2, s1, e1);
+    }
+
+    /**
+     * Interval 1 lies within interval 2 and the two intervals end at the same point.
+     * <p/>
+     * &nbsp;&nbsp;|-----|<br/>
+     * |------|<br/>
+     * 
+     * @param s1
+     * @param e1
+     * @param s2
+     * @param e2
+     * @return
+     */
+    public static <T extends Comparable<T>> boolean ends(T s1, T e1, T s2, T e2) {
+        return s1.compareTo(s2) >= 0 && e1.compareTo(e2) == 0;
+    }
+
+    public static <T extends Comparable<T>> boolean endedBy(T s1, T e1, T s2, T e2) {
+        return ends(s2, e2, s1, e1);
+    }
+
+    public static <T extends Comparable<T>> boolean equals(T s1, T e1, T s2, T e2) {
+        return s1.compareTo(s2) == 0 && e1.compareTo(e2) == 0;
+    }
+
+}
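A quick usage sketch of the relations defined above, instantiated on Long endpoints the way the descriptor classes use them; the endpoint values are arbitrary and the example assumes the IntervalLogic class from this patch is on the classpath.

import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalLogic;

// Illustrative checks of the Allen-style relations; endpoints are arbitrary chronons.
public class IntervalLogicExample {
    public static void main(String[] args) {
        Long s1 = 0L, e1 = 10L, s2 = 10L, e2 = 20L;

        System.out.println(IntervalLogic.meets(s1, e1, s2, e2));       // true: e1 == s2
        System.out.println(IntervalLogic.before(s1, e1, s2, e2));      // false: the intervals touch
        System.out.println(IntervalLogic.overlap(s1, e1, s2, e2));     // false: they only meet at 10
        System.out.println(IntervalLogic.overlaps(0L, 15L, 10L, 20L)); // true: 10 < 15 < 20
        System.out.println(IntervalLogic.covers(0L, 20L, 5L, 15L));    // true: [0,20] contains [5,15]
        System.out.println(IntervalLogic.starts(0L, 5L, 0L, 20L));     // true: same start, earlier end
        System.out.println(IntervalLogic.ends(15L, 20L, 0L, 20L));     // true: same end, later start
    }
}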
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalMeetsDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalMeetsDescriptor.java
new file mode 100644
index 0000000..0263edb
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalMeetsDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalMeetsDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-meets",
+            2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalMeetsDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AbstractIntervalLogicFuncDescriptor#compareIntervals(long, long, long, long)
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.meets(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalMetByDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalMetByDescriptor.java
new file mode 100644
index 0000000..4cab864
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalMetByDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalMetByDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-met-by", 2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalMetByDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AbstractIntervalLogicFuncDescriptor#compareIntervals(long, long, long, long)
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.metBy(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalOverlappedByDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalOverlappedByDescriptor.java
new file mode 100644
index 0000000..17e7612
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalOverlappedByDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalOverlappedByDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-overlapped-by", 2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalOverlappedByDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AbstractIntervalLogicFuncDescriptor#compareIntervals(long, long, long, long)
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.overlappedBy(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalOverlapsDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalOverlapsDescriptor.java
new file mode 100644
index 0000000..ee62711
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalOverlapsDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalOverlapsDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-overlaps", 2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalOverlapsDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AbstractIntervalLogicFuncDescriptor#compareIntervals(long, long, long, long)
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.overlaps(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalStartedByDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalStartedByDescriptor.java
new file mode 100644
index 0000000..7e5e0fe
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalStartedByDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalStartedByDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-started-by", 2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalStartedByDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AbstractIntervalLogicFuncDescriptor#compareIntervals(long, long, long, long)
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.startedBy(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalStartsDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalStartsDescriptor.java
new file mode 100644
index 0000000..c2ca32e
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalStartsDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalStartsDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-starts", 2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalStartsDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AbstractIntervalLogicFuncDescriptor#compareIntervals(long, long, long, long)
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.starts(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/OverlapDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/OverlapDescriptor.java
new file mode 100644
index 0000000..80479cd
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/OverlapDescriptor.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class OverlapDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "overlap", 2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new OverlapDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AbstractIntervalLogicFuncDescriptor#compareIntervals(long, long, long, long)
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.overlap(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/SubtractDateDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/SubtractDateDescriptor.java
new file mode 100644
index 0000000..67d8ef3
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/SubtractDateDescriptor.java
@@ -0,0 +1,134 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADuration;
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class SubtractDateDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "subtract-date",
+            2);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new SubtractDateDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADuration> durationSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADURATION);
+
+                    private AMutableDuration aDuration = new AMutableDuration(0, 0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_DATE_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 0: expecting a Date ("
+                                                + SER_DATE_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                + "), but got: " + argOut0.getByteArray()[0]);
+                            }
+
+                            if (argOut1.getByteArray()[0] != SER_DATE_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 1: expecting a Date ("
+                                                + SER_DATE_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                + "), but got: " + argOut1.getByteArray()[0]);
+                            }
+
+                            long durationChronon = (ADateSerializerDeserializer.getChronon(argOut0.getByteArray(), 1) - ADateSerializerDeserializer
+                                    .getChronon(argOut1.getByteArray(), 1)) * GregorianCalendarSystem.CHRONON_OF_DAY;
+
+                            aDuration.setValue(0, durationChronon);
+
+                            durationSerde.serialize(aDuration, out);
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
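subtract-date works entirely in day chronons and converts the difference to milliseconds at the end. A small standalone sketch of that arithmetic, again substituting a literal constant for GregorianCalendarSystem.CHRONON_OF_DAY; the day counts are illustrative.

// Sketch of subtract-date's duration arithmetic: both inputs are day counts since
// the epoch, and the result is their difference expressed in milliseconds.
public class SubtractDateSketch {

    private static final long MILLIS_PER_DAY = 86400000L; // stands in for CHRONON_OF_DAY

    static long subtractDates(int dateChronon0, int dateChronon1) {
        return (dateChronon0 - (long) dateChronon1) * MILLIS_PER_DAY;
    }

    public static void main(String[] args) {
        int jan1of2013 = 15706; // days since 1970-01-01
        int jan1of2012 = 15340;
        System.out.println(subtractDates(jan1of2013, jan1of2012)); // 31622400000 (366 days)
    }
}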
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/SubtractDatetimeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/SubtractDatetimeDescriptor.java
new file mode 100644
index 0000000..5b77709
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/SubtractDatetimeDescriptor.java
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADuration;
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class SubtractDatetimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "subtract-datetime", 2);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new SubtractDatetimeDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADuration> durationSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADURATION);
+
+                    private AMutableDuration aDuration = new AMutableDuration(0, 0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_DATETIME_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 0: expecting a DateTime, but got: "
+                                                + argOut0.getByteArray()[0]);
+                            }
+
+                            if (argOut1.getByteArray()[0] != SER_DATETIME_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 1: expecting a DateTime, but got: "
+                                                + argOut1.getByteArray()[0]);
+                            }
+
+                            long durationChronon = ADateTimeSerializerDeserializer
+                                    .getChronon(argOut0.getByteArray(), 1)
+                                    - ADateTimeSerializerDeserializer.getChronon(argOut1.getByteArray(), 1);
+
+                            aDuration.setValue(0, durationChronon);
+
+                            durationSerde.serialize(aDuration, out);
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
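
A minimal standalone sketch (not part of the patch) of the arithmetic subtract-datetime performs, using the same AMutableDuration constructor and setValue(months, chronon) call as the evaluator above; the chronon values and the class name are hypothetical:

    import edu.uci.ics.asterix.om.base.AMutableDuration;

    public class SubtractDatetimeSketch {
        public static void main(String[] args) {
            // hypothetical datetime chronons: milliseconds since the Unix epoch
            long chronon0 = 1356998400000L; // 2013-01-01T00:00:00.000Z
            long chronon1 = 1354320000000L; // 2012-12-01T00:00:00.000Z
            // the millisecond difference becomes the chronon component of a
            // duration; the month component stays 0, exactly as in the evaluator
            long durationChronon = chronon0 - chronon1;
            AMutableDuration aDuration = new AMutableDuration(0, 0);
            aDuration.setValue(0, durationChronon);
            System.out.println(durationChronon + " ms"); // 2678400000 ms = 31 days
        }
    }

The evaluator writes the result through durationSerde instead of printing it; the sketch only isolates the chronon arithmetic.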
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/SubtractTimeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/SubtractTimeDescriptor.java
new file mode 100644
index 0000000..323033e
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/SubtractTimeDescriptor.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADuration;
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class SubtractTimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "subtract-time",
+            2);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new SubtractTimeDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADuration> durationSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADURATION);
+
+                    private AMutableDuration aDuration = new AMutableDuration(0, 0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_TIME_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 0: expecting a Time ("
+                                                + SER_TIME_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                + "), but got: " + argOut0.getByteArray()[0]);
+                            }
+
+                            if (argOut1.getByteArray()[0] != SER_TIME_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 1: expecting a Time ("
+                                                + SER_TIME_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                + "), but got: " + argOut1.getByteArray()[0]);
+                            }
+
+                            int durationChronon = ATimeSerializerDeserializer.getChronon(argOut0.getByteArray(), 1)
+                                    - ATimeSerializerDeserializer.getChronon(argOut1.getByteArray(), 1);
+
+                            aDuration.setValue(0, durationChronon);
+
+                            durationSerde.serialize(aDuration, out);
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/TimeFromDatetimeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/TimeFromDatetimeDescriptor.java
new file mode 100644
index 0000000..b1053a4
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/TimeFromDatetimeDescriptor.java
@@ -0,0 +1,127 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AMutableTime;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.ATime;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TimeFromDatetimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "time-from-datetime", 1);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new TimeFromDatetimeDescriptor();
+        }
+
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ATime> timeSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ATIME);
+                    private AMutableTime aTime = new AMutableTime(0);
+
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut.reset();
+                        eval.evaluate(tuple);
+                        try {
+                            if (argOut.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                            } else {
+                                if (argOut.getByteArray()[0] != SER_DATETIME_TYPE_TAG) {
+                                    throw new AlgebricksException(
+                                            "Inapplicable input type for function time-from-datetime: expecting a DateTime ("
+                                                    + SER_DATETIME_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                    + "), but got: " + argOut.getByteArray()[0]);
+                                }
+                                long datetimeChronon = ADateTimeSerializerDeserializer.getChronon(
+                                        argOut.getByteArray(), 1);
+                                int timeChronon = (int) (datetimeChronon % GregorianCalendarSystem.CHRONON_OF_DAY);
+                                if (timeChronon < 0) {
+                                    timeChronon += GregorianCalendarSystem.CHRONON_OF_DAY;
+                                }
+                                aTime.setValue(timeChronon);
+                                timeSerde.serialize(aTime, out);
+                            }
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
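
A short sketch (not part of the patch) of the day-wrapping step in time-from-datetime. It assumes GregorianCalendarSystem.CHRONON_OF_DAY is the number of milliseconds per day and mirrors it as a local constant; Java's % keeps the sign of the dividend, which is why the negative branch is needed for pre-epoch datetimes:

    public class TimeFromDatetimeSketch {
        public static void main(String[] args) {
            // assumed value of GregorianCalendarSystem.CHRONON_OF_DAY: ms per day
            final int CHRONON_OF_DAY = 24 * 60 * 60 * 1000;
            // hypothetical pre-epoch datetime chronon: 1969-12-31T23:59:59.999Z
            long datetimeChronon = -1L;
            int timeChronon = (int) (datetimeChronon % CHRONON_OF_DAY);
            if (timeChronon < 0) {
                timeChronon += CHRONON_OF_DAY; // wrap back into [0, CHRONON_OF_DAY)
            }
            System.out.println(timeChronon); // 86399999, i.e. 23:59:59.999
        }
    }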
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/TimeFromUnixTimeInMsDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/TimeFromUnixTimeInMsDescriptor.java
new file mode 100644
index 0000000..b3fbc0e
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/TimeFromUnixTimeInMsDescriptor.java
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AMutableTime;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.ATime;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TimeFromUnixTimeInMsDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "time-from-unix-time-in-ms", 1);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_INT8_TYPE_TAG = ATypeTag.INT8.serialize();
+    private final static byte SER_INT16_TYPE_TAG = ATypeTag.INT16.serialize();
+    private final static byte SER_INT32_TYPE_TAG = ATypeTag.INT32.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new TimeFromUnixTimeInMsDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ATime> timeSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ATIME);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    private AMutableTime aTime = new AMutableTime(0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut.reset();
+                        eval.evaluate(tuple);
+                        try {
+                            if (argOut.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                            } else {
+                                if (argOut.getByteArray()[0] == SER_INT8_TYPE_TAG) {
+                                    aTime.setValue(AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1));
+                                } else if (argOut.getByteArray()[0] == SER_INT16_TYPE_TAG) {
+                                    aTime.setValue(AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1));
+                                } else if (argOut.getByteArray()[0] == SER_INT32_TYPE_TAG) {
+                                    aTime.setValue(AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1));
+                                } else {
+                                    throw new AlgebricksException(
+                                            "Inapplicable input type for function time-from-unix-time-in-ms: expecting an integer or null, but got: "
+                                                    + argOut.getByteArray()[0]);
+                                }
+                                timeSerde.serialize(aTime, out);
+                            }
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+}
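
A sketch (not part of the patch) of the tagged-value layout the evaluator dispatches on: byte 0 of the evaluated argument is the serialized ATypeTag, and the untagged payload starts at offset 1. The example bytes are hypothetical and assume the big-endian int layout read by the Hyracks serde:

    import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
    import edu.uci.ics.asterix.om.types.ATypeTag;

    public class TaggedInt32Sketch {
        public static void main(String[] args) {
            // 1 tag byte followed by a 4-byte int payload (0x00000E10 = 3600)
            byte[] tagged = new byte[] { ATypeTag.INT32.serialize(), 0x00, 0x00, 0x0E, 0x10 };
            if (tagged[0] == ATypeTag.INT32.serialize()) {
                int ms = AInt32SerializerDeserializer.getInt(tagged, 1);
                System.out.println(ms + " ms after midnight"); // 3600 ms = 00:00:03.600
            }
        }
    }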
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
index 41ca4ed..a334893 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
@@ -66,6 +66,13 @@
 import edu.uci.ics.asterix.runtime.aggregates.std.SumAggregateDescriptor;
 import edu.uci.ics.asterix.runtime.aggregates.stream.EmptyStreamAggregateDescriptor;
 import edu.uci.ics.asterix.runtime.aggregates.stream.NonEmptyStreamAggregateDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalDayAccessor;
+import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalHourAccessor;
+import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalMillisecondAccessor;
+import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalMinuteAccessor;
+import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalMonthAccessor;
+import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalSecondAccessor;
+import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalYearAccessor;
 import edu.uci.ics.asterix.runtime.evaluators.accessors.CircleCenterAccessor;
 import edu.uci.ics.asterix.runtime.evaluators.accessors.CircleRadiusAccessor;
 import edu.uci.ics.asterix.runtime.evaluators.accessors.LineRectanglePolygonAccessor;
@@ -85,6 +92,12 @@
 import edu.uci.ics.asterix.runtime.evaluators.constructors.AInt32ConstructorDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.constructors.AInt64ConstructorDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.constructors.AInt8ConstructorDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.constructors.AIntervalFromDateConstructorDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.constructors.AIntervalFromDateTimeConstructorDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.constructors.AIntervalFromTimeConstructorDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.constructors.AIntervalStartFromDateConstructorDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.constructors.AIntervalStartFromDateTimeConstructorDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.constructors.AIntervalStartFromTimeConstructorDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.constructors.ALineConstructorDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.constructors.ANullConstructorDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.constructors.APoint3DConstructorDescriptor;
@@ -154,6 +167,10 @@
 import edu.uci.ics.asterix.runtime.evaluators.functions.SpatialDistanceDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.SpatialIntersectDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.StartsWithDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.SubstringDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.SwitchCaseDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.UnorderedListConstructorDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.WordTokensDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.StringConcatDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.StringEndWithDescrtiptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.StringEqualDescriptor;
@@ -169,11 +186,38 @@
 import edu.uci.ics.asterix.runtime.evaluators.functions.Substring2Descriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.SubstringAfterDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.SubstringBeforeDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.SubstringDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.SwitchCaseDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.UnorderedListConstructorDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.WordTokensDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.YearDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AddDateDurationDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AddDatetimeDurationDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AddTimeDurationDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AdjustDateTimeForTimeZoneDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AdjustTimeForTimeZoneDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.CalendarDuartionFromDateDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.CalendarDurationFromDateTimeDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.CurrentDateDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.CurrentDateTimeDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.CurrentTimeDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DateFromDatetimeDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DateFromUnixTimeInDaysDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DatetimeFromDateAndTimeDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DatetimeFromUnixTimeInMsDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalAfterDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalBeforeDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalCoveredByDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalCoversDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalEndedByDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalEndsDecriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalMeetsDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalMetByDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.OverlapDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalOverlappedByDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalOverlapsDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalStartedByDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalStartsDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.SubtractDateDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.SubtractDatetimeDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.SubtractTimeDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.TimeFromDatetimeDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.TimeFromUnixTimeInMsDescriptor;
 import edu.uci.ics.asterix.runtime.operators.file.AdmSchemafullRecordParserFactory;
 import edu.uci.ics.asterix.runtime.operators.file.NtDelimitedDataTupleParserFactory;
 import edu.uci.ics.asterix.runtime.runningaggregates.std.TidRunningAggregateDescriptor;
@@ -267,22 +311,21 @@
 		temp.add(SubstringDescriptor.FACTORY);
 		temp.add(TidRunningAggregateDescriptor.FACTORY);
 
-		// format-dependent
-		temp.add(AndDescriptor.FACTORY);
-		temp.add(OrDescriptor.FACTORY);
-		temp.add(LikeDescriptor.FACTORY);
-		temp.add(YearDescriptor.FACTORY);
-		temp.add(ScanCollectionDescriptor.FACTORY);
-		temp.add(AnyCollectionMemberDescriptor.FACTORY);
-		temp.add(ClosedRecordConstructorDescriptor.FACTORY);
-		temp.add(FieldAccessByIndexDescriptor.FACTORY);
-		temp.add(FieldAccessByNameDescriptor.FACTORY);
-		temp.add(GetItemDescriptor.FACTORY);
-		temp.add(NumericUnaryMinusDescriptor.FACTORY);
-		temp.add(OpenRecordConstructorDescriptor.FACTORY);
-		temp.add(OrderedListConstructorDescriptor.FACTORY);
-		temp.add(UnorderedListConstructorDescriptor.FACTORY);
-		temp.add(EmbedTypeDescriptor.FACTORY);
+        // format-dependent
+        temp.add(AndDescriptor.FACTORY);
+        temp.add(OrDescriptor.FACTORY);
+        temp.add(LikeDescriptor.FACTORY);
+        temp.add(ScanCollectionDescriptor.FACTORY);
+        temp.add(AnyCollectionMemberDescriptor.FACTORY);
+        temp.add(ClosedRecordConstructorDescriptor.FACTORY);
+        temp.add(FieldAccessByIndexDescriptor.FACTORY);
+        temp.add(FieldAccessByNameDescriptor.FACTORY);
+        temp.add(GetItemDescriptor.FACTORY);
+        temp.add(NumericUnaryMinusDescriptor.FACTORY);
+        temp.add(OpenRecordConstructorDescriptor.FACTORY);
+        temp.add(OrderedListConstructorDescriptor.FACTORY);
+        temp.add(UnorderedListConstructorDescriptor.FACTORY);
+        temp.add(EmbedTypeDescriptor.FACTORY);
 
 		temp.add(NumericAddDescriptor.FACTORY);
 		temp.add(NumericDivideDescriptor.FACTORY);
@@ -417,12 +460,63 @@
 		temp.add(CastRecordDescriptor.FACTORY);
 		temp.add(NotNullDescriptor.FACTORY);
 
-		IFunctionManager mgr = new FunctionManagerImpl();
-		for (IFunctionDescriptorFactory fdFactory : temp) {
-			mgr.registerFunction(fdFactory);
-		}
-		FunctionManagerHolder.setFunctionManager(mgr);
-	}
+        // Spatial and temporal type accessors
+        temp.add(TemporalYearAccessor.FACTORY);
+        temp.add(TemporalMonthAccessor.FACTORY);
+        temp.add(TemporalDayAccessor.FACTORY);
+        temp.add(TemporalHourAccessor.FACTORY);
+        temp.add(TemporalMinuteAccessor.FACTORY);
+        temp.add(TemporalSecondAccessor.FACTORY);
+        temp.add(TemporalMillisecondAccessor.FACTORY);
+
+        // Temporal functions
+        temp.add(DateFromUnixTimeInDaysDescriptor.FACTORY);
+        temp.add(DateFromDatetimeDescriptor.FACTORY);
+        temp.add(AddDateDurationDescriptor.FACTORY);
+        temp.add(SubtractDateDescriptor.FACTORY);
+        temp.add(TimeFromUnixTimeInMsDescriptor.FACTORY);
+        temp.add(TimeFromDatetimeDescriptor.FACTORY);
+        temp.add(SubtractTimeDescriptor.FACTORY);
+        temp.add(AddTimeDurationDescriptor.FACTORY);
+        temp.add(DatetimeFromUnixTimeInMsDescriptor.FACTORY);
+        temp.add(DatetimeFromDateAndTimeDescriptor.FACTORY);
+        temp.add(SubtractDatetimeDescriptor.FACTORY);
+        temp.add(AddDatetimeDurationDescriptor.FACTORY);
+        temp.add(CalendarDurationFromDateTimeDescriptor.FACTORY);
+        temp.add(CalendarDuartionFromDateDescriptor.FACTORY);
+        temp.add(AdjustDateTimeForTimeZoneDescriptor.FACTORY);
+        temp.add(AdjustTimeForTimeZoneDescriptor.FACTORY);
+        temp.add(IntervalBeforeDescriptor.FACTORY);
+        temp.add(IntervalAfterDescriptor.FACTORY);
+        temp.add(IntervalMeetsDescriptor.FACTORY);
+        temp.add(IntervalMetByDescriptor.FACTORY);
+        temp.add(IntervalOverlapsDescriptor.FACTORY);
+        temp.add(IntervalOverlappedByDescriptor.FACTORY);
+        temp.add(OverlapDescriptor.FACTORY);
+        temp.add(IntervalStartsDescriptor.FACTORY);
+        temp.add(IntervalStartedByDescriptor.FACTORY);
+        temp.add(IntervalCoversDescriptor.FACTORY);
+        temp.add(IntervalCoveredByDescriptor.FACTORY);
+        temp.add(IntervalEndsDecriptor.FACTORY);
+        temp.add(IntervalEndedByDescriptor.FACTORY);
+        temp.add(CurrentDateDescriptor.FACTORY);
+        temp.add(CurrentTimeDescriptor.FACTORY);
+        temp.add(CurrentDateTimeDescriptor.FACTORY);
+
+        // Interval constructor
+        temp.add(AIntervalFromDateConstructorDescriptor.FACTORY);
+        temp.add(AIntervalFromTimeConstructorDescriptor.FACTORY);
+        temp.add(AIntervalFromDateTimeConstructorDescriptor.FACTORY);
+        temp.add(AIntervalStartFromDateConstructorDescriptor.FACTORY);
+        temp.add(AIntervalStartFromDateTimeConstructorDescriptor.FACTORY);
+        temp.add(AIntervalStartFromTimeConstructorDescriptor.FACTORY);
+
+        IFunctionManager mgr = new FunctionManagerImpl();
+        for (IFunctionDescriptorFactory fdFactory : temp) {
+            mgr.registerFunction(fdFactory);
+        }
+        FunctionManagerHolder.setFunctionManager(mgr);
+    }
 
 	@Override
 	public IBinaryBooleanInspectorFactory getBinaryBooleanInspectorFactory() {
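
All of the new temporal factories take effect through the registration loop at the end of the hunk above. Reduced to a single descriptor, the pattern looks like the fragment below (imports omitted because the packages of FunctionManagerImpl and FunctionManagerHolder are outside this hunk; this is a sketch of the existing pattern, not additional patch content):

    // every FACTORY added to `temp` is registered with the function manager
    List<IFunctionDescriptorFactory> temp = new ArrayList<IFunctionDescriptorFactory>();
    temp.add(SubtractDatetimeDescriptor.FACTORY);
    IFunctionManager mgr = new FunctionManagerImpl();
    for (IFunctionDescriptorFactory fdFactory : temp) {
        mgr.registerFunction(fdFactory);
    }
    FunctionManagerHolder.setFunctionManager(mgr);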
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/ADMDataParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/ADMDataParser.java
index 8606088..2e64ad4 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/ADMDataParser.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/ADMDataParser.java
@@ -22,10 +22,8 @@
 import java.util.List;
 import java.util.Queue;
 
-import edu.uci.ics.asterix.adm.parser.nontagged.AdmLexer;
-import edu.uci.ics.asterix.adm.parser.nontagged.AdmLexerConstants;
-import edu.uci.ics.asterix.adm.parser.nontagged.ParseException;
-import edu.uci.ics.asterix.adm.parser.nontagged.Token;
+import edu.uci.ics.asterix.runtime.operators.file.adm.AdmLexer;
+import edu.uci.ics.asterix.runtime.operators.file.adm.AdmLexerException;
 import edu.uci.ics.asterix.builders.IARecordBuilder;
 import edu.uci.ics.asterix.builders.IAsterixListBuilder;
 import edu.uci.ics.asterix.builders.OrderedListBuilder;
@@ -36,6 +34,7 @@
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AIntervalSerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ALineSerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APoint3DSerializerDeserializer;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APointSerializerDeserializer;
@@ -55,7 +54,7 @@
 import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
 
 /**
- * Parser for ADM formatted data. 
+ * Parser for ADM formatted data.
  */
 public class ADMDataParser extends AbstractDataParser implements IDataParser {
 
@@ -82,21 +81,25 @@
     }
 
     @Override
-    public void initialize(InputStream in, ARecordType recordType, boolean datasetRec) {
-        admLexer = new AdmLexer(in);
+    public void initialize(InputStream in, ARecordType recordType, boolean datasetRec) throws AsterixException {
         this.recordType = recordType;
         this.datasetRec = datasetRec;
+        try {
+            admLexer = new AdmLexer(new java.io.InputStreamReader(in));
+        } catch (IOException e) {
+            throw new AsterixException(e);
+        }
     }
 
     protected boolean parseAdmInstance(IAType objectType, boolean datasetRec, DataOutput out) throws AsterixException,
             IOException {
-        Token token;
+        int token;
         try {
             token = admLexer.next();
-        } catch (ParseException pe) {
-            throw new AsterixException(pe);
+        } catch (AdmLexerException e) {
+            throw new AsterixException(e);
         }
-        if (token.kind == AdmLexerConstants.EOF) {
+        if (token == AdmLexer.TOKEN_EOF) {
             return false;
         } else {
             admFromLexerStream(token, objectType, out, datasetRec);
@@ -104,157 +107,212 @@
         }
     }
 
-    private void admFromLexerStream(Token token, IAType objectType, DataOutput out, Boolean datasetRec)
+    private void admFromLexerStream(int token, IAType objectType, DataOutput out, Boolean datasetRec)
             throws AsterixException, IOException {
 
-        switch (token.kind) {
-            case AdmLexerConstants.NULL_LITERAL: {
+        switch (token) {
+            case AdmLexer.TOKEN_NULL_LITERAL: {
                 if (checkType(ATypeTag.NULL, objectType, out)) {
                     nullSerde.serialize(ANull.NULL, out);
                 } else
                     throw new AsterixException(" This field can not be null ");
                 break;
             }
-            case AdmLexerConstants.TRUE_LITERAL: {
+            case AdmLexer.TOKEN_TRUE_LITERAL: {
                 if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
                     booleanSerde.serialize(ABoolean.TRUE, out);
                 } else
                     throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
                 break;
             }
-            case AdmLexerConstants.BOOLEAN_CONS: {
+            case AdmLexer.TOKEN_BOOLEAN_CONS: {
                 parseConstructor(ATypeTag.BOOLEAN, objectType, out);
                 break;
             }
-            case AdmLexerConstants.FALSE_LITERAL: {
+            case AdmLexer.TOKEN_FALSE_LITERAL: {
                 if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
                     booleanSerde.serialize(ABoolean.FALSE, out);
                 } else
                     throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
                 break;
             }
-            case AdmLexerConstants.DOUBLE_LITERAL: {
+            case AdmLexer.TOKEN_DOUBLE_LITERAL: {
                 if (checkType(ATypeTag.DOUBLE, objectType, out)) {
-                    aDouble.setValue(Double.parseDouble(token.image));
+                    aDouble.setValue(Double.parseDouble(admLexer.getLastTokenImage()));
                     doubleSerde.serialize(aDouble, out);
                 } else
                     throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
                 break;
             }
-            case AdmLexerConstants.DOUBLE_CONS: {
+            case AdmLexer.TOKEN_DOUBLE_CONS: {
                 parseConstructor(ATypeTag.DOUBLE, objectType, out);
                 break;
             }
-            case AdmLexerConstants.FLOAT_LITERAL: {
+            case AdmLexer.TOKEN_FLOAT_LITERAL: {
                 if (checkType(ATypeTag.FLOAT, objectType, out)) {
-                    aFloat.setValue(Float.parseFloat(token.image));
+                    aFloat.setValue(Float.parseFloat(admLexer.getLastTokenImage()));
                     floatSerde.serialize(aFloat, out);
                 } else
                     throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
                 break;
             }
-            case AdmLexerConstants.FLOAT_CONS: {
+            case AdmLexer.TOKEN_FLOAT_CONS: {
                 parseConstructor(ATypeTag.FLOAT, objectType, out);
                 break;
             }
-            case AdmLexerConstants.INT8_LITERAL: {
+            case AdmLexer.TOKEN_INT8_LITERAL: {
                 if (checkType(ATypeTag.INT8, objectType, out)) {
-                    parseInt8(token.image, out);
+                    parseInt8(admLexer.getLastTokenImage(), out);
                 } else
                     throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
                 break;
             }
-            case AdmLexerConstants.INT8_CONS: {
+            case AdmLexer.TOKEN_INT8_CONS: {
                 parseConstructor(ATypeTag.INT8, objectType, out);
                 break;
             }
-            case AdmLexerConstants.INT16_LITERAL: {
+            case AdmLexer.TOKEN_INT16_LITERAL: {
                 if (checkType(ATypeTag.INT16, objectType, out)) {
-                    parseInt16(token.image, out);
+                    parseInt16(admLexer.getLastTokenImage(), out);
                 } else
                     throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
                 break;
             }
-            case AdmLexerConstants.INT16_CONS: {
+            case AdmLexer.TOKEN_INT16_CONS: {
                 parseConstructor(ATypeTag.INT16, objectType, out);
                 break;
             }
-            case AdmLexerConstants.INT_LITERAL:
-            case AdmLexerConstants.INT32_LITERAL: {
+            case AdmLexer.TOKEN_INT_LITERAL:
+            case AdmLexer.TOKEN_INT32_LITERAL: {
                 if (checkType(ATypeTag.INT32, objectType, out)) {
-                    parseInt32(token.image, out);
+                    parseInt32(admLexer.getLastTokenImage(), out);
                 } else
                     throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
                 break;
             }
-            case AdmLexerConstants.INT32_CONS: {
+            case AdmLexer.TOKEN_INT32_CONS: {
                 parseConstructor(ATypeTag.INT32, objectType, out);
                 break;
             }
-            case AdmLexerConstants.INT64_LITERAL: {
+            case AdmLexer.TOKEN_INT64_LITERAL: {
                 if (checkType(ATypeTag.INT64, objectType, out)) {
-                    parseInt64(token.image, out);
+                    parseInt64(admLexer.getLastTokenImage(), out);
                 } else
                     throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
                 break;
             }
-            case AdmLexerConstants.INT64_CONS: {
+            case AdmLexer.TOKEN_INT64_CONS: {
                 parseConstructor(ATypeTag.INT64, objectType, out);
                 break;
             }
-            case AdmLexerConstants.STRING_LITERAL: {
+            case AdmLexer.TOKEN_STRING_LITERAL: {
                 if (checkType(ATypeTag.STRING, objectType, out)) {
-                    aString.setValue(token.image.substring(1, token.image.length() - 1));
+                    aString.setValue(admLexer.getLastTokenImage().substring(1,
+                            admLexer.getLastTokenImage().length() - 1));
                     stringSerde.serialize(aString, out);
                 } else
                     throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
                 break;
             }
-            case AdmLexerConstants.STRING_CONS: {
+            case AdmLexer.TOKEN_STRING_CONS: {
                 parseConstructor(ATypeTag.STRING, objectType, out);
                 break;
             }
-            case AdmLexerConstants.DATE_CONS: {
+            case AdmLexer.TOKEN_DATE_CONS: {
                 parseConstructor(ATypeTag.DATE, objectType, out);
                 break;
             }
-            case AdmLexerConstants.TIME_CONS: {
+            case AdmLexer.TOKEN_TIME_CONS: {
                 parseConstructor(ATypeTag.TIME, objectType, out);
                 break;
             }
-            case AdmLexerConstants.DATETIME_CONS: {
+            case AdmLexer.TOKEN_DATETIME_CONS: {
                 parseConstructor(ATypeTag.DATETIME, objectType, out);
                 break;
             }
-            case AdmLexerConstants.DURATION_CONS: {
+            case AdmLexer.TOKEN_INTERVAL_DATE_CONS: {
+                try {
+                    if (checkType(ATypeTag.INTERVAL, objectType, out)) {
+                        if (admLexer.next() == AdmLexer.TOKEN_CONSTRUCTOR_OPEN) {
+                            if (admLexer.next() == AdmLexer.TOKEN_STRING_CONS) {
+                                AIntervalSerializerDeserializer.parseDate(admLexer.getLastTokenImage(), out);
+
+                                if (admLexer.next() == AdmLexer.TOKEN_CONSTRUCTOR_CLOSE) {
+                                    break;
+                                }
+                            }
+                        }
+                    }
+                } catch (AdmLexerException ex) {
+                    throw new AsterixException(ex);
+                }
+                throw new AsterixException("Error parsing a date interval.");
+            }
+            case AdmLexer.TOKEN_INTERVAL_TIME_CONS: {
+                try {
+                    if (checkType(ATypeTag.INTERVAL, objectType, out)) {
+                        if (admLexer.next() == AdmLexer.TOKEN_CONSTRUCTOR_OPEN) {
+                            if (admLexer.next() == AdmLexer.TOKEN_STRING_CONS) {
+                                AIntervalSerializerDeserializer.parseTime(admLexer.getLastTokenImage(), out);
+
+                                if (admLexer.next() == AdmLexer.TOKEN_CONSTRUCTOR_CLOSE) {
+                                    break;
+                                }
+                            }
+                        }
+                    }
+                } catch (AdmLexerException ex) {
+                    throw new AsterixException(ex);
+                }
+                throw new AsterixException("Error parsing a time interval.");
+            }
+            case AdmLexer.TOKEN_INTERVAL_DATETIME_CONS: {
+                try {
+                    if (checkType(ATypeTag.INTERVAL, objectType, out)) {
+                        if (admLexer.next() == AdmLexer.TOKEN_CONSTRUCTOR_OPEN) {
+                            if (admLexer.next() == AdmLexer.TOKEN_STRING_CONS) {
+                                AIntervalSerializerDeserializer.parseDatetime(admLexer.getLastTokenImage(), out);
+
+                                if (admLexer.next() == AdmLexer.TOKEN_CONSTRUCTOR_CLOSE) {
+                                    break;
+                                }
+                            }
+                        }
+                    }
+                } catch (AdmLexerException ex) {
+                    throw new AsterixException(ex);
+                }
+                throw new AsterixException("Error parsing a datetime interval.");
+            }
+            case AdmLexer.TOKEN_DURATION_CONS: {
                 parseConstructor(ATypeTag.DURATION, objectType, out);
                 break;
             }
-            case AdmLexerConstants.POINT_CONS: {
+            case AdmLexer.TOKEN_POINT_CONS: {
                 parseConstructor(ATypeTag.POINT, objectType, out);
                 break;
             }
-            case AdmLexerConstants.POINT3D_CONS: {
+            case AdmLexer.TOKEN_POINT3D_CONS: {
                 parseConstructor(ATypeTag.POINT3D, objectType, out);
                 break;
             }
-            case AdmLexerConstants.CIRCLE_CONS: {
+            case AdmLexer.TOKEN_CIRCLE_CONS: {
                 parseConstructor(ATypeTag.CIRCLE, objectType, out);
                 break;
             }
-            case AdmLexerConstants.RECTANGLE_CONS: {
+            case AdmLexer.TOKEN_RECTANGLE_CONS: {
                 parseConstructor(ATypeTag.RECTANGLE, objectType, out);
                 break;
             }
-            case AdmLexerConstants.LINE_CONS: {
+            case AdmLexer.TOKEN_LINE_CONS: {
                 parseConstructor(ATypeTag.LINE, objectType, out);
                 break;
             }
-            case AdmLexerConstants.POLYGON_CONS: {
+            case AdmLexer.TOKEN_POLYGON_CONS: {
                 parseConstructor(ATypeTag.POLYGON, objectType, out);
                 break;
             }
-            case AdmLexerConstants.START_UNORDERED_LIST: {
+            case AdmLexer.TOKEN_START_UNORDERED_LIST: {
                 if (checkType(ATypeTag.UNORDEREDLIST, objectType, out)) {
                     objectType = getComplexType(objectType, ATypeTag.UNORDEREDLIST);
                     parseUnorderedList((AUnorderedListType) objectType, out);
@@ -263,7 +321,7 @@
                 break;
             }
 
-            case AdmLexerConstants.START_ORDERED_LIST: {
+            case AdmLexer.TOKEN_START_ORDERED_LIST: {
                 if (checkType(ATypeTag.ORDEREDLIST, objectType, out)) {
                     objectType = getComplexType(objectType, ATypeTag.ORDEREDLIST);
                     parseOrderedList((AOrderedListType) objectType, out);
@@ -271,7 +329,7 @@
                     throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
                 break;
             }
-            case AdmLexerConstants.START_RECORD: {
+            case AdmLexer.TOKEN_START_RECORD: {
                 if (checkType(ATypeTag.RECORD, objectType, out)) {
                     objectType = getComplexType(objectType, ATypeTag.RECORD);
                     parseRecord((ARecordType) objectType, out, datasetRec);
@@ -279,11 +337,11 @@
                     throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
                 break;
             }
-            case AdmLexerConstants.EOF: {
+            case AdmLexer.TOKEN_EOF: {
                 break;
             }
             default: {
-                throw new AsterixException("Unexpected ADM token kind: " + admLexer.tokenKindToString(token.kind) + ".");
+                throw new AsterixException("Unexpected ADM token kind: " + AdmLexer.tokenKindToString(token) + ".");
             }
         }
     }
@@ -365,7 +423,7 @@
             recBuilder.reset(null);
 
         recBuilder.init();
-        Token token = null;
+        int token;
         boolean inRecord = true;
         boolean expectingRecordField = false;
         boolean first = true;
@@ -375,15 +433,15 @@
         IAType fieldType = null;
         do {
             token = nextToken();
-            switch (token.kind) {
-                case AdmLexerConstants.END_RECORD: {
+            switch (token) {
+                case AdmLexer.TOKEN_END_RECORD: {
                     if (expectingRecordField) {
                         throw new AsterixException("Found END_RECORD while expecting a record field.");
                     }
                     inRecord = false;
                     break;
                 }
-                case AdmLexerConstants.STRING_LITERAL: {
+                case AdmLexer.TOKEN_STRING_LITERAL: {
                     // we've read the name of the field
                     // now read the content
                     fieldNameBuffer.reset();
@@ -391,12 +449,14 @@
                     expectingRecordField = false;
 
                     if (recType != null) {
-                        String fldName = token.image.substring(1, token.image.length() - 1);
+                        String fldName = admLexer.getLastTokenImage().substring(1,
+                                admLexer.getLastTokenImage().length() - 1);
                         fieldId = recBuilder.getFieldId(fldName);
                         if (fieldId < 0 && !recType.isOpen()) {
                             throw new AsterixException("This record is closed, you can not add extra fields !!");
                         } else if (fieldId < 0 && recType.isOpen()) {
-                            aStringFieldName.setValue(token.image.substring(1, token.image.length() - 1));
+                            aStringFieldName.setValue(admLexer.getLastTokenImage().substring(1,
+                                    admLexer.getLastTokenImage().length() - 1));
                             stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
                             openRecordField = true;
                             fieldType = null;
@@ -407,16 +467,17 @@
                             openRecordField = false;
                         }
                     } else {
-                        aStringFieldName.setValue(token.image.substring(1, token.image.length() - 1));
+                        aStringFieldName.setValue(admLexer.getLastTokenImage().substring(1,
+                                admLexer.getLastTokenImage().length() - 1));
                         stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
                         openRecordField = true;
                         fieldType = null;
                     }
 
                     token = nextToken();
-                    if (token.kind != AdmLexerConstants.COLON) {
-                        throw new AsterixException("Unexpected ADM token kind: "
-                                + admLexer.tokenKindToString(token.kind) + " while expecting \":\".");
+                    if (token != AdmLexer.TOKEN_COLON) {
+                        throw new AsterixException("Unexpected ADM token kind: " + AdmLexer.tokenKindToString(token)
+                                + " while expecting \":\".");
                     }
 
                     token = nextToken();
@@ -436,7 +497,7 @@
 
                     break;
                 }
-                case AdmLexerConstants.COMMA: {
+                case AdmLexer.TOKEN_COMMA: {
                     if (first) {
                         throw new AsterixException("Found COMMA before any record field.");
                     }
@@ -447,7 +508,7 @@
                     break;
                 }
                 default: {
-                    throw new AsterixException("Unexpected ADM token kind: " + admLexer.tokenKindToString(token.kind)
+                    throw new AsterixException("Unexpected ADM token kind: " + AdmLexer.tokenKindToString(token)
                             + " while parsing record fields.");
                 }
             }
@@ -498,18 +559,18 @@
             itemType = oltype.getItemType();
         orderedListBuilder.reset(oltype);
 
-        Token token = null;
+        int token;
         boolean inList = true;
         boolean expectingListItem = false;
         boolean first = true;
         do {
             token = nextToken();
-            if (token.kind == AdmLexerConstants.END_ORDERED_LIST) {
+            if (token == AdmLexer.TOKEN_END_ORDERED_LIST) {
                 if (expectingListItem) {
                     throw new AsterixException("Found END_COLLECTION while expecting a list item.");
                 }
                 inList = false;
-            } else if (token.kind == AdmLexerConstants.COMMA) {
+            } else if (token == AdmLexer.TOKEN_COMMA) {
                 if (first) {
                     throw new AsterixException("Found COMMA before any list item.");
                 }
@@ -542,18 +603,18 @@
             itemType = uoltype.getItemType();
         unorderedListBuilder.reset(uoltype);
 
-        Token token = null;
+        int token;
         boolean inList = true;
         boolean expectingListItem = false;
         boolean first = true;
         do {
             token = nextToken();
-            if (token.kind == AdmLexerConstants.END_UNORDERED_LIST) {
+            if (token == AdmLexer.TOKEN_END_UNORDERED_LIST) {
                 if (expectingListItem) {
                     throw new AsterixException("Found END_COLLECTION while expecting a list item.");
                 }
                 inList = false;
-            } else if (token.kind == AdmLexerConstants.COMMA) {
+            } else if (token == AdmLexer.TOKEN_COMMA) {
                 if (first) {
                     throw new AsterixException("Found COMMA before any list item.");
                 }
@@ -574,11 +635,13 @@
         returnTempBuffer(itemBuffer);
     }
 
-    private Token nextToken() throws AsterixException {
+    private int nextToken() throws AsterixException {
         try {
             return admLexer.next();
-        } catch (ParseException pe) {
-            throw new AsterixException(pe);
+        } catch (AdmLexerException e) {
+            throw new AsterixException(e);
+        } catch (IOException e) {
+            throw new AsterixException(e);
         }
     }
 
@@ -633,73 +696,109 @@
 
     private void parseConstructor(ATypeTag typeTag, IAType objectType, DataOutput out) throws AsterixException {
         try {
-            Token token = admLexer.next();
-            if (token.kind == AdmLexerConstants.CONSTRUCTOR_OPEN) {
+            int token = admLexer.next();
+            if (token == AdmLexer.TOKEN_CONSTRUCTOR_OPEN) {
                 if (checkType(typeTag, objectType, out)) {
                     token = admLexer.next();
-                    if (token.kind == AdmLexerConstants.STRING_LITERAL) {
+                    if (token == AdmLexer.TOKEN_STRING_LITERAL) {
                         switch (typeTag) {
                             case BOOLEAN:
-                                parseBoolean(token.image.substring(1, token.image.length() - 1), out);
+                                parseBoolean(
+                                        admLexer.getLastTokenImage().substring(1,
+                                                admLexer.getLastTokenImage().length() - 1), out);
                                 break;
                             case INT8:
-                                parseInt8(token.image.substring(1, token.image.length() - 1), out);
+                                parseInt8(
+                                        admLexer.getLastTokenImage().substring(1,
+                                                admLexer.getLastTokenImage().length() - 1), out);
                                 break;
                             case INT16:
-                                parseInt16(token.image.substring(1, token.image.length() - 1), out);
+                                parseInt16(
+                                        admLexer.getLastTokenImage().substring(1,
+                                                admLexer.getLastTokenImage().length() - 1), out);
                                 break;
                             case INT32:
-                                parseInt32(token.image.substring(1, token.image.length() - 1), out);
+                                parseInt32(
+                                        admLexer.getLastTokenImage().substring(1,
+                                                admLexer.getLastTokenImage().length() - 1), out);
                                 break;
                             case INT64:
-                                parseInt64(token.image.substring(1, token.image.length() - 1), out);
+                                parseInt64(
+                                        admLexer.getLastTokenImage().substring(1,
+                                                admLexer.getLastTokenImage().length() - 1), out);
                                 break;
                             case FLOAT:
-                                aFloat.setValue(Float.parseFloat(token.image.substring(1, token.image.length() - 1)));
+                                aFloat.setValue(Float.parseFloat(admLexer.getLastTokenImage().substring(1,
+                                        admLexer.getLastTokenImage().length() - 1)));
                                 floatSerde.serialize(aFloat, out);
                                 break;
                             case DOUBLE:
-                                aDouble.setValue(Double.parseDouble(token.image.substring(1, token.image.length() - 1)));
+                                aDouble.setValue(Double.parseDouble(admLexer.getLastTokenImage().substring(1,
+                                        admLexer.getLastTokenImage().length() - 1)));
                                 doubleSerde.serialize(aDouble, out);
                                 break;
                             case STRING:
-                                aString.setValue(token.image.substring(1, token.image.length() - 1));
+                                aString.setValue(admLexer.getLastTokenImage().substring(1,
+                                        admLexer.getLastTokenImage().length() - 1));
                                 stringSerde.serialize(aString, out);
                                 break;
                             case TIME:
-                                parseTime(token.image.substring(1, token.image.length() - 1), out);
+                                parseTime(
+                                        admLexer.getLastTokenImage().substring(1,
+                                                admLexer.getLastTokenImage().length() - 1), out);
                                 break;
                             case DATE:
-                                parseDate(token.image.substring(1, token.image.length() - 1), out);
+                                parseDate(
+                                        admLexer.getLastTokenImage().substring(1,
+                                                admLexer.getLastTokenImage().length() - 1), out);
                                 break;
                             case DATETIME:
-                                parseDatetime(token.image.substring(1, token.image.length() - 1), out);
+                                parseDatetime(
+                                        admLexer.getLastTokenImage().substring(1,
+                                                admLexer.getLastTokenImage().length() - 1), out);
                                 break;
                             case DURATION:
-                                parseDuration(token.image.substring(1, token.image.length() - 1), out);
+                                parseDuration(
+                                        admLexer.getLastTokenImage().substring(1,
+                                                admLexer.getLastTokenImage().length() - 1), out);
                                 break;
                             case POINT:
-                                parsePoint(token.image.substring(1, token.image.length() - 1), out);
+                                parsePoint(
+                                        admLexer.getLastTokenImage().substring(1,
+                                                admLexer.getLastTokenImage().length() - 1), out);
                                 break;
                             case POINT3D:
-                                parsePoint3d(token.image.substring(1, token.image.length() - 1), out);
+                                parsePoint3d(
+                                        admLexer.getLastTokenImage().substring(1,
+                                                admLexer.getLastTokenImage().length() - 1), out);
                                 break;
                             case CIRCLE:
-                                parseCircle(token.image.substring(1, token.image.length() - 1), out);
+                                parseCircle(
+                                        admLexer.getLastTokenImage().substring(1,
+                                                admLexer.getLastTokenImage().length() - 1), out);
                                 break;
                             case RECTANGLE:
-                                parseRectangle(token.image.substring(1, token.image.length() - 1), out);
+                                parseRectangle(
+                                        admLexer.getLastTokenImage().substring(1,
+                                                admLexer.getLastTokenImage().length() - 1), out);
                                 break;
                             case LINE:
-                                parseLine(token.image.substring(1, token.image.length() - 1), out);
+                                parseLine(
+                                        admLexer.getLastTokenImage().substring(1,
+                                                admLexer.getLastTokenImage().length() - 1), out);
                                 break;
                             case POLYGON:
-                                parsePolygon(token.image.substring(1, token.image.length() - 1), out);
+                                parsePolygon(
+                                        admLexer.getLastTokenImage().substring(1,
+                                                admLexer.getLastTokenImage().length() - 1), out);
                                 break;
+                            default:
+                                throw new AsterixException("Missing deserializer method for constructor: "
+                                        + AdmLexer.tokenKindToString(token) + ".");
 
                         }
                         token = admLexer.next();
-                        if (token.kind == AdmLexerConstants.CONSTRUCTOR_CLOSE)
+                        if (token == AdmLexer.TOKEN_CONSTRUCTOR_CLOSE)
                             return;
                     }
                 }
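
The ADMDataParser hunks above switch the parser from JavaCC Token objects to the int token codes returned by the generated AdmLexer: next() yields a code that is compared against the AdmLexer.TOKEN_* constants, getLastTokenImage() exposes the raw matched text, and lexer failures now surface as AdmLexerException or IOException. A minimal consumption sketch follows, assuming a Reader-based constructor and using a hypothetical stripQuotes() helper; the patch itself inlines the substring(1, length - 1) call at every use site, and the sketch is an illustration of the pattern, not code from this change set.

import java.io.IOException;
import java.io.StringReader;

import edu.uci.ics.asterix.runtime.operators.file.adm.AdmLexer;
import edu.uci.ics.asterix.runtime.operators.file.adm.AdmLexerException;

public class AdmLexerUsageSketch {

    // STRING_LITERAL images include the surrounding double quotes; the parser
    // strips them before serializing the field name or value.
    private static String stripQuotes(String image) {
        return image.substring(1, image.length() - 1);
    }

    public static void main(String[] args) throws IOException, AdmLexerException {
        // Assumed constructor: the generated lexer reading from a character stream.
        AdmLexer lexer = new AdmLexer(new StringReader("\"name\" : \"Alice\""));

        int token = lexer.next();                      // field name as a string literal
        if (token == AdmLexer.TOKEN_STRING_LITERAL) {
            System.out.println("field name: " + stripQuotes(lexer.getLastTokenImage()));
        }

        token = lexer.next();                          // separator between name and value
        if (token != AdmLexer.TOKEN_COLON) {
            throw new IllegalStateException("Unexpected token: " + AdmLexer.tokenKindToString(token));
        }

        token = lexer.next();                          // field value, another string literal
        if (token == AdmLexer.TOKEN_STRING_LITERAL) {
            System.out.println("field value: " + stripQuotes(lexer.getLastTokenImage()));
        }
    }
}

Comparing primitive int codes also avoids allocating a Token object per lexeme, which is presumably the motivation for the switch on this bulk-parsing path.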
diff --git a/asterix-runtime/src/main/javacc/AdmLexer.jj b/asterix-runtime/src/main/javacc/AdmLexer.jj
deleted file mode 100644
index fbab62f..0000000
--- a/asterix-runtime/src/main/javacc/AdmLexer.jj
+++ /dev/null
@@ -1,150 +0,0 @@
-options {
-
-	  
-       STATIC = false;
-	
-}
-
-PARSER_BEGIN(AdmLexer)
-
-package edu.uci.ics.asterix.adm.parser;
-
-import java.io.*;
-
-public class AdmLexer {
-
-	public static void main(String args[]) throws ParseException, TokenMgrError, IOException, FileNotFoundException {
-	   	File file = new File(args[0]);
-		Reader freader =  new BufferedReader(new InputStreamReader 
-		         (new FileInputStream(file), "UTF-8"));		
-		AdmLexer flexer = new AdmLexer(freader);
-		Token t = null;
-		do {
-		   t = flexer.next();		   
-		   System.out.println(AdmLexerConstants.tokenImage[t.kind]);
-		} while (t.kind != EOF);
-	    freader.close();
-	}
-	
-	public Token next() throws ParseException {
-	   return getNextToken();
-	}
-	
-	public String tokenKindToString(int tokenKind) {
-	   return AdmLexerConstants.tokenImage[tokenKind];
-	}
-}
-
-PARSER_END(AdmLexer)
-
-<DEFAULT>
-TOKEN :
-{
-	<NULL_LITERAL : "null">
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<TRUE_LITERAL : "true">
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<FALSE_LITERAL : "false">
-}
-
-
-<DEFAULT>
-TOKEN :
-{
-	<INTEGER_LITERAL : ("-")? (<DIGIT>)+ >
-}
-
-
-<DEFAULT>
-TOKEN :
-{
-	<#DIGIT : ["0" - "9"]>
-}
-
-
-TOKEN:
-{
-  < DOUBLE_LITERAL: 
-	      ("-")? <INTEGER> ( "." <INTEGER> )? (<EXPONENT>)? 
-	    | ("-")? "." <INTEGER>
-  >
-  | < #EXPONENT: ["e","E"] (["+","-"])? (["0"-"9"])+ >
-	| <INTEGER : (<DIGIT>)+ >
-	| <FLOAT_LITERAL: <DOUBLE_LITERAL>("f"|"F")>
- }
-
-<DEFAULT>
-TOKEN :
-{
-	<STRING_LITERAL : ("\"" (<EscapeQuot> | ~["\""])* "\"") >
-	|
-	< #EscapeQuot: "\\\"" >
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<START_RECORD : "{">
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<END_RECORD : "}">
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<COMMA : ",">
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<COLON : ":">
-}
-
-
-<DEFAULT>
-TOKEN :
-{
-	<START_ORDERED_LIST : "[">
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<END_ORDERED_LIST : "]">
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<START_UNORDERED_LIST : "{{">
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<END_UNORDERED_LIST : "}}">
-}
-
-
-
-
-SKIP:
-{
-    " "
-|   "\t"
-|   "\r"
-|   "\n"
-}
diff --git a/asterix-runtime/src/main/javacc/nontagged/AdmLexer.jj b/asterix-runtime/src/main/javacc/nontagged/AdmLexer.jj
deleted file mode 100644
index f556d54..0000000
--- a/asterix-runtime/src/main/javacc/nontagged/AdmLexer.jj
+++ /dev/null
@@ -1,362 +0,0 @@
-options {
-
-	  
-       STATIC = false;
-	
-}
-
-PARSER_BEGIN(AdmLexer)
-
-package edu.uci.ics.asterix.adm.parser.nontagged;
-
-import java.io.*;
-
-public class AdmLexer {
-
-	public static void main(String args[]) throws ParseException, TokenMgrError, IOException, FileNotFoundException {
-	   	File file = new File(args[0]);
-	   	Reader freader = new BufferedReader(new InputStreamReader
-                      (new FileInputStream(file), "UTF-8"));
-		AdmLexer flexer = new AdmLexer(freader);
-		Token t = null;
-		do {
-		   t = flexer.next();		   
-		   System.out.println(AdmLexerConstants.tokenImage[t.kind]);
-		} while (t.kind != EOF);
-	    freader.close();
-	}
-	
-	public Token next() throws ParseException {
-	   return getNextToken();
-	}
-	
-	public String tokenKindToString(int tokenKind) {
-	   return AdmLexerConstants.tokenImage[tokenKind];
-	}
-}
-
-PARSER_END(AdmLexer)
-
-<DEFAULT>
-TOKEN :
-{
-	<NULL_LITERAL : "null">
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<TRUE_LITERAL : "true">
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<FALSE_LITERAL : "false">
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<BOOLEAN_CONS : ("boolean") >	
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<CONSTRUCTOR_OPEN : ("(")>	
-}
-
-
-<DEFAULT>
-TOKEN :
-{
-	<CONSTRUCTOR_CLOSE : (")")>	
-}
-
-<DEFAULT>
-TOKEN:
-{
-	<INT8_LITERAL : ("-" | "+")? (<DIGIT>)+ ("i8")>
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<INT8_CONS : ("int8") >	
-}
-
-<DEFAULT>
-TOKEN:
-{
-	<INT16_LITERAL : ("-" | "+")? (<DIGIT>)+ ("i16")>	
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<INT16_CONS : ("int16") >	
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<INT32_LITERAL : ("-" | "+")? (<DIGIT>)+ ("i32")>
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<INT32_CONS : ("int32")>	
-}
-
-<DEFAULT>
-TOKEN:
-{
-	<INT64_LITERAL : ("-" | "+")? (<DIGIT>)+ ("i64")>	
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<INT64_CONS : ("int64") >	
-}
-
-<DEFAULT>
-TOKEN:
-{
-	<INT_LITERAL : ("-" | "+")? (<DIGIT>)+>
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<CIRCLE_LITERAL : "P"<DOUBLE_LITERAL>(",") <DOUBLE_LITERAL> ("R") <DOUBLE_LITERAL> >	
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<CIRCLE_CONS : ("circle") >	
-}
-
-
-<DEFAULT>
-TOKEN :
-{
-	<TIMEZONE_LITERAL : (("+"|"-")<INTEGER>(":")<INTEGER>) | ("Z") >
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<DATE_LITERAL : ("-")?<INTEGER>("-")<INTEGER>("-")<INTEGER> (<TIMEZONE_LITERAL>)? >
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<DATE_CONS : ("date")>	
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<TIME_LITERAL : <INTEGER>(":")<INTEGER>(":")<INTEGER> ( (":")<INTEGER>)? (<TIMEZONE_LITERAL>)? >
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<TIME_CONS : ("time")>	
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<DATETIME_LITERAL : ("-")? <INTEGER>("-")<INTEGER>("-")<INTEGER>("T")<INTEGER>(":")<INTEGER>(":")<INTEGER> ( (":")<INTEGER>)? (<TIMEZONE_LITERAL>)?>
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<DATETIME_CONS : ("datetime")>	
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<DURATION_LITERAL : ("-")? ("D")(<INTEGER>("Y"))?(<INTEGER>("M"))?(<INTEGER>("D"))?(("T")(<INTEGER>("H"))?(<INTEGER>("M"))?(<INTEGER>("S"))?)?>
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<DURATION_CONS : ("duration")>	
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<#DIGIT : ["0" - "9"]>
-}
-
-TOKEN:
-{
-  < DOUBLE_LITERAL: 
-	      ("-" | "+")? <INTEGER> ( "." <INTEGER> )? (<EXPONENT>)? 
-	    | ("-" | "+")? "." <INTEGER>
-  >
-  | < #EXPONENT: ["e","E"] (["+","-"])? (["0"-"9"])+ >
-	| <INTEGER : (<DIGIT>)+ >
-	| <FLOAT_LITERAL: <DOUBLE_LITERAL>("f"|"F")>
- }
-
-
-<DEFAULT>
-TOKEN :
-{
-	<FLOAT_CONS : ("float")>	
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<DOUBLE_CONS : ("double")>	
-}
-
-
-<DEFAULT>
-TOKEN :
-{
-	<STRING_LITERAL : ("\"" (<EscapeQuot> | ~["\""])* "\"") >
-	|
-	< #EscapeQuot: "\\\"" >
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<STRING_CONS : ("string")>	
-}
-
-
-<DEFAULT>
-TOKEN :
-{
-	<POINT_LITERAL : "P"<DOUBLE_LITERAL>(",")<DOUBLE_LITERAL>>	
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<POINT_CONS : ("point")>	
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<POINT3D_LITERAL : "P" <DOUBLE_LITERAL>(",") <DOUBLE_LITERAL> (",") <DOUBLE_LITERAL>>	
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<POINT3D_CONS : ("point3d")>	
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<LINE_LITERAL : "P"<DOUBLE_LITERAL>(",") <DOUBLE_LITERAL> ("P") <DOUBLE_LITERAL> (",") <DOUBLE_LITERAL>>	
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<LINE_CONS : ("line")>	
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<POLYGON_LITERAL : "P"<DOUBLE_LITERAL>(",") <DOUBLE_LITERAL> ("P") <DOUBLE_LITERAL> (",") <DOUBLE_LITERAL> (("P") <DOUBLE_LITERAL> (",") <DOUBLE_LITERAL>)+>	
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<POLYGON_CONS : ("polygon")>	
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<RECTANGLE_CONS : ("rectangle")>	
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<RECTANGLE_LITERAL : "P"<DOUBLE_LITERAL>(",") <DOUBLE_LITERAL> ("P") <DOUBLE_LITERAL> (",") <DOUBLE_LITERAL>>		
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<START_RECORD : "{">
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<END_RECORD : "}">
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<COMMA : ",">
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<COLON : ":">
-}
-
-
-<DEFAULT>
-TOKEN :
-{
-	<START_ORDERED_LIST : "[">
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<END_ORDERED_LIST : "]">
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<START_UNORDERED_LIST : "{{">
-}
-
-<DEFAULT>
-TOKEN :
-{
-	<END_UNORDERED_LIST : "}}">
-}
-
-
-
-
-SKIP:
-{
-    " "
-|   "\t"
-|   "\r"
-|   "\n"
-}
diff --git a/asterix-runtime/src/main/resources/adm.grammar b/asterix-runtime/src/main/resources/adm.grammar
new file mode 100644
index 0000000..56c7212
--- /dev/null
+++ b/asterix-runtime/src/main/resources/adm.grammar
@@ -0,0 +1,63 @@
+# LEXER GENERATOR configuration file
+# ---------------------------------------
+# Place *first* the generic configuration
+# then list your grammar.
+
+PACKAGE:          edu.uci.ics.asterix.runtime.operators.file.adm
+LEXER_NAME:       AdmLexer
+
+TOKENS:
+
+BOOLEAN_CONS   = string(boolean)
+INT8_CONS      = string(int8)
+INT16_CONS     = string(int16)
+INT32_CONS     = string(int32)
+INT64_CONS     = string(int64)
+FLOAT_CONS     = string(float)
+DOUBLE_CONS    = string(double)
+DATE_CONS      = string(date)
+DATETIME_CONS  = string(datetime)
+DURATION_CONS  = string(duration)
+STRING_CONS    = string(string)
+POINT_CONS     = string(point)
+POINT3D_CONS   = string(point3d)
+LINE_CONS      = string(line)
+POLYGON_CONS   = string(polygon)
+RECTANGLE_CONS = string(rectangle)
+CIRCLE_CONS    = string(circle)
+TIME_CONS      = string(time)
+INTERVAL_TIME_CONS      = string(interval_time)
+INTERVAL_DATE_CONS      = string(interval_date)
+INTERVAL_DATETIME_CONS  = string(interval_datetime)
+
+NULL_LITERAL   = string(null)
+TRUE_LITERAL   = string(true)
+FALSE_LITERAL  = string(false)
+
+CONSTRUCTOR_OPEN     = char(()
+CONSTRUCTOR_CLOSE    = char())
+START_RECORD         = char({)
+END_RECORD           = char(})
+COMMA                = char(\,)
+COLON                = char(:)
+START_ORDERED_LIST   = char([)
+END_ORDERED_LIST     = char(])
+START_UNORDERED_LIST = string({{)
+END_UNORDERED_LIST   = string(}})
+
+STRING_LITERAL       = char("), anythingUntil(")
+
+INT_LITERAL          = signOrNothing(), digitSequence()
+INT8_LITERAL         = token(INT_LITERAL), string(i8)
+INT16_LITERAL        = token(INT_LITERAL), string(i16)
+INT32_LITERAL        = token(INT_LITERAL), string(i32)
+INT64_LITERAL        = token(INT_LITERAL), string(i64)
+
+@EXPONENT            = caseInsensitiveChar(e), signOrNothing(), digitSequence()
+
+DOUBLE_LITERAL		 = signOrNothing(), char(.), digitSequence()
+DOUBLE_LITERAL		 = signOrNothing(), digitSequence(), char(.), digitSequence()
+DOUBLE_LITERAL		 = signOrNothing(), digitSequence(), char(.), digitSequence(), token(@EXPONENT)
+DOUBLE_LITERAL		 = signOrNothing(), digitSequence(), token(@EXPONENT)
+
+FLOAT_LITERAL		 = token(DOUBLE_LITERAL), caseInsensitiveChar(f)
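+
The adm.grammar file above replaces the two deleted JavaCC grammars: each line composes matcher primitives (string(), char(), signOrNothing(), digitSequence(), caseInsensitiveChar(), anythingUntil(), token()) into a named token, and the lexer generator (presumably wired in via the asterix-maven-plugins module added to the root pom below) emits the AdmLexer class with one TOKEN_* constant per name. As a hypothetical illustration of what a composed rule accepts, and not the generated code, the rule INT8_LITERAL = token(INT_LITERAL), string(i8) matches an optional sign, a digit sequence, and then the literal suffix i8:

public class Int8LiteralRuleSketch {

    // signOrNothing(), digitSequence(): an optional leading + or - followed by one or more digits.
    static boolean matchesIntLiteral(String s) {
        int i = (s.startsWith("+") || s.startsWith("-")) ? 1 : 0;
        if (i >= s.length()) {
            return false;
        }
        for (int j = i; j < s.length(); j++) {
            if (!Character.isDigit(s.charAt(j))) {
                return false;
            }
        }
        return true;
    }

    // token(INT_LITERAL), string(i8): an INT_LITERAL immediately followed by the suffix "i8".
    static boolean matchesInt8Literal(String s) {
        return s.endsWith("i8") && matchesIntLiteral(s.substring(0, s.length() - 2));
    }

    public static void main(String[] args) {
        System.out.println(matchesInt8Literal("-42i8")); // true
        System.out.println(matchesInt8Literal("42"));    // false: missing the i8 suffix
    }
}

The repeated DOUBLE_LITERAL lines read as alternatives, so that token matches whichever of the listed compositions succeeds.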
diff --git a/asterix-test-framework/pom.xml b/asterix-test-framework/pom.xml
index 2f0142e..9462b5d 100755
--- a/asterix-test-framework/pom.xml
+++ b/asterix-test-framework/pom.xml
@@ -1,4 +1,5 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 	<modelVersion>4.0.0</modelVersion>
 	<parent>
 		<artifactId>asterix</artifactId>
@@ -13,21 +14,21 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
 				</configuration>
 			</plugin>
-            <plugin>
-                <groupId>org.jvnet.jaxb2.maven2</groupId>
-                <artifactId>maven-jaxb2-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <goals>
-                            <goal>generate</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
+			<plugin>
+				<groupId>org.jvnet.jaxb2.maven2</groupId>
+				<artifactId>maven-jaxb2-plugin</artifactId>
+				<executions>
+					<execution>
+						<goals>
+							<goal>generate</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
 		</plugins>
 	</build>
 
diff --git a/asterix-tools/pom.xml b/asterix-tools/pom.xml
index 62176d9..0440618 100644
--- a/asterix-tools/pom.xml
+++ b/asterix-tools/pom.xml
@@ -15,8 +15,8 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
 				</configuration>
 			</plugin>
 			<plugin>
diff --git a/asterix-transactions/pom.xml b/asterix-transactions/pom.xml
index 7db5cd9..a402077 100644
--- a/asterix-transactions/pom.xml
+++ b/asterix-transactions/pom.xml
@@ -1,4 +1,5 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 	<modelVersion>4.0.0</modelVersion>
 	<parent>
 		<artifactId>asterix</artifactId>
@@ -16,8 +17,8 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
 				</configuration>
 			</plugin>
 		</plugins>
@@ -25,9 +26,9 @@
 	</build>
 
 	<dependencies>
-	    <dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-common</artifactId>
-  	</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-storage-am-common</artifactId>
+		</dependency>
 	</dependencies>
 </project>
diff --git a/pom.xml b/pom.xml
index 5080813..8999ad9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,6 +6,11 @@
 	<version>0.0.4-SNAPSHOT</version>
 	<packaging>pom</packaging>
 
+    <properties>
+    	<algebricks.version>0.2.3-SNAPSHOT</algebricks.version>
+    	<hyracks.version>0.2.3-SNAPSHOT</hyracks.version>
+    </properties>
+
 	<build>
 		<plugins>
 			<plugin>
@@ -20,11 +25,6 @@
 		</plugins>
 	</build>
 
-    <properties>
-    	<algebricks.version>0.2.3-SNAPSHOT</algebricks.version>
-    	<hyracks.version>0.2.3-SNAPSHOT</hyracks.version>
-    </properties>
-
 	<scm>
 		<connection>scm:svn:https://grape.ics.uci.edu/svn/asterix/trunk/asterix</connection>
 		<developerConnection>scm:svn:https://grape.ics.uci.edu/svn/asterix/trunk/asterix</developerConnection>
@@ -84,6 +84,7 @@
                 <module>asterix-metadata</module>
                 <module>asterix-dist</module>
                 <module>asterix-test-framework</module>
+        <module>asterix-maven-plugins</module>
         </modules>
 
 	<repositories>
@@ -142,7 +143,7 @@
             <optional>true</optional>
         </dependency>
     </dependencies>
-    <dependencyManagement>
+        <dependencyManagement>
     	<dependencies>
     		<dependency>
     			<groupId>edu.uci.ics.hyracks</groupId>