Merge branch 'master' into westmann/one_dataset_arg
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
index d4e5e22..3ff4549 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
@@ -384,11 +384,7 @@
         Dataset dataset = null;
         try {
             DatasetDecl dd = (DatasetDecl) stmt;
-            dataverseName = dd.getDataverse() != null ? dd.getDataverse().getValue()
-                    : activeDefaultDataverse != null ? activeDefaultDataverse.getDataverseName() : null;
-            if (dataverseName == null) {
-                throw new AlgebricksException(" dataverse not specified ");
-            }
+            dataverseName = getActiveDataverseName(dd.getDataverse());
             datasetName = dd.getName().getValue();
 
             DatasetType dsType = dd.getDatasetType();
@@ -553,11 +549,7 @@
         JobSpecification spec = null;
         try {
             CreateIndexStatement stmtCreateIndex = (CreateIndexStatement) stmt;
-            dataverseName = stmtCreateIndex.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : stmtCreateIndex.getDataverseName().getValue();
-            if (dataverseName == null) {
-                throw new AlgebricksException(" dataverse not specified ");
-            }
+            dataverseName = getActiveDataverseName(stmtCreateIndex.getDataverseName());
             datasetName = stmtCreateIndex.getDatasetName().getValue();
 
             Dataset ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
@@ -686,15 +678,11 @@
 
         try {
             TypeDecl stmtCreateType = (TypeDecl) stmt;
-            String dataverseName = stmtCreateType.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : stmtCreateType.getDataverseName().getValue();
-            if (dataverseName == null) {
-                throw new AlgebricksException(" dataverse not specified ");
-            }
+            String dataverseName = getActiveDataverseName(stmtCreateType.getDataverseName());
             String typeName = stmtCreateType.getIdent().getValue();
             Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
             if (dv == null) {
-                throw new AlgebricksException("Unknonw dataverse " + dataverseName);
+                throw new AlgebricksException("Unknown dataverse " + dataverseName);
             }
             Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, typeName);
             if (dt != null) {
@@ -849,11 +837,7 @@
         List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
         try {
             DropStatement stmtDelete = (DropStatement) stmt;
-            dataverseName = stmtDelete.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : stmtDelete.getDataverseName().getValue();
-            if (dataverseName == null) {
-                throw new AlgebricksException(" dataverse not specified ");
-            }
+            dataverseName = getActiveDataverseName(stmtDelete.getDataverseName());
             datasetName = stmtDelete.getDatasetName().getValue();
 
             Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
@@ -960,11 +944,7 @@
         try {
             IndexDropStatement stmtIndexDrop = (IndexDropStatement) stmt;
             datasetName = stmtIndexDrop.getDatasetName().getValue();
-            dataverseName = stmtIndexDrop.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : stmtIndexDrop.getDataverseName().getValue();
-            if (dataverseName == null) {
-                throw new AlgebricksException(" dataverse not specified ");
-            }
+            dataverseName = getActiveDataverseName(stmtIndexDrop.getDataverseName());
 
             Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
             if (ds == null) {
@@ -1062,11 +1042,7 @@
 
         try {
             TypeDropStatement stmtTypeDrop = (TypeDropStatement) stmt;
-            String dataverseName = stmtTypeDrop.getDataverseName() == null ? (activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName()) : stmtTypeDrop.getDataverseName().getValue();
-            if (dataverseName == null) {
-                throw new AlgebricksException(" dataverse not specified ");
-            }
+            String dataverseName = getActiveDataverseName(stmtTypeDrop.getDataverseName());
             String typeName = stmtTypeDrop.getTypeName().getValue();
             Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, typeName);
             if (dt == null) {
@@ -1117,11 +1093,7 @@
 
         try {
             CreateFunctionStatement cfs = (CreateFunctionStatement) stmt;
-            String dataverse = cfs.getSignature().getNamespace() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : cfs.getSignature().getNamespace();
-            if (dataverse == null) {
-                throw new AlgebricksException(" dataverse not specified ");
-            }
+            String dataverse = getActiveDataverseName(cfs.getSignature().getNamespace());
             Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
             if (dv == null) {
                 throw new AlgebricksException("There is no dataverse with this name " + dataverse + ".");
@@ -1174,8 +1146,7 @@
         List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
         try {
             LoadFromFileStatement loadStmt = (LoadFromFileStatement) stmt;
-            String dataverseName = loadStmt.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : loadStmt.getDataverseName().getValue();
+            String dataverseName = getActiveDataverseName(loadStmt.getDataverseName());
             CompiledLoadFromFileStatement cls = new CompiledLoadFromFileStatement(dataverseName, loadStmt
                     .getDatasetName().getValue(), loadStmt.getAdapter(), loadStmt.getProperties(),
                     loadStmt.dataIsAlreadySorted());
@@ -1223,8 +1194,7 @@
         try {
             metadataProvider.setWriteTransaction(true);
             WriteFromQueryResultStatement st1 = (WriteFromQueryResultStatement) stmt;
-            String dataverseName = st1.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : st1.getDataverseName().getValue();
+            String dataverseName = getActiveDataverseName(st1.getDataverseName());
             CompiledWriteFromQueryResultStatement clfrqs = new CompiledWriteFromQueryResultStatement(dataverseName, st1
                     .getDatasetName().getValue(), st1.getQuery(), st1.getVarCounter());
 
@@ -1255,8 +1225,7 @@
         try {
             metadataProvider.setWriteTransaction(true);
             InsertStatement stmtInsert = (InsertStatement) stmt;
-            String dataverseName = stmtInsert.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : stmtInsert.getDataverseName().getValue();
+            String dataverseName = getActiveDataverseName(stmtInsert.getDataverseName());
             CompiledInsertStatement clfrqs = new CompiledInsertStatement(dataverseName, stmtInsert.getDatasetName()
                     .getValue(), stmtInsert.getQuery(), stmtInsert.getVarCounter());
             JobSpecification compiled = rewriteCompileQuery(metadataProvider, clfrqs.getQuery(), clfrqs);
@@ -1289,8 +1258,7 @@
         try {
             metadataProvider.setWriteTransaction(true);
             DeleteStatement stmtDelete = (DeleteStatement) stmt;
-            String dataverseName = stmtDelete.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : stmtDelete.getDataverseName().getValue();
+            String dataverseName = getActiveDataverseName(stmtDelete.getDataverseName());
             CompiledDeleteStatement clfrqs = new CompiledDeleteStatement(stmtDelete.getVariableExpr(), dataverseName,
                     stmtDelete.getDatasetName().getValue(), stmtDelete.getCondition(), stmtDelete.getVarCounter(),
                     metadataProvider);
@@ -1340,8 +1308,7 @@
 
         try {
             BeginFeedStatement bfs = (BeginFeedStatement) stmt;
-            String dataverseName = bfs.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : bfs.getDataverseName().getValue();
+            String dataverseName = getActiveDataverseName(bfs.getDataverseName());
 
             CompiledBeginFeedStatement cbfs = new CompiledBeginFeedStatement(dataverseName, bfs.getDatasetName()
                     .getValue(), bfs.getQuery(), bfs.getVarCounter());
@@ -1389,8 +1356,7 @@
 
         try {
             ControlFeedStatement cfs = (ControlFeedStatement) stmt;
-            String dataverseName = cfs.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : cfs.getDatasetName().getValue();
+            String dataverseName = getActiveDataverseName(cfs.getDataverseName());
             CompiledControlFeedStatement clcfs = new CompiledControlFeedStatement(cfs.getOperationType(),
                     dataverseName, cfs.getDatasetName().getValue(), cfs.getAlterAdapterConfParams());
             JobSpecification jobSpec = FeedOperations.buildControlFeedJobSpec(clcfs, metadataProvider);
@@ -1535,6 +1501,23 @@
         return format;
     }
 
+    private String getActiveDataverseName(String dataverse)
+            throws AlgebricksException {
+        if (dataverse != null) {
+            return dataverse;
+        }
+        if (activeDefaultDataverse != null) {
+            return activeDefaultDataverse.getDataverseName();
+        }
+        throw new AlgebricksException("dataverse not specified");
+    }
+    
+    private String getActiveDataverseName(Identifier dataverse)
+            throws AlgebricksException {
+        return getActiveDataverseName(
+                dataverse != null ? dataverse.getValue() : null);
+    }
+    
     private void acquireWriteLatch() {
         MetadataManager.INSTANCE.acquireWriteLatch();
     }
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_0/issue_363_temporal_sec_key_0.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_0/issue_363_temporal_sec_key_0.1.ddl.aql
new file mode 100644
index 0000000..73d63c9
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_0/issue_363_temporal_sec_key_0.1.ddl.aql
@@ -0,0 +1,19 @@
+/*
+ * Description  : create a dataset using year-month-duration for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type Emp as open {
+id:year-month-duration,
+dur:year-month-duration,
+name:string
+}
+
+create dataset Employee(Emp) primary key id;
+create index TestSecondIndex on Employee(dur);
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_0/issue_363_temporal_sec_key_0.2.update.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_0/issue_363_temporal_sec_key_0.2.update.aql
new file mode 100644
index 0000000..bd96595
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_0/issue_363_temporal_sec_key_0.2.update.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : create a dataset using year-month-duration for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+use dataverse test;
+
+insert into dataset Employee({"id":year-month-duration("P16Y"), "dur":year-month-duration("-P23Y"), "name": "John"})
+insert into dataset Employee({"id":year-month-duration("-P37M"), "dur":year-month-duration("P1Y48M"), "name": "Alex"})
+insert into dataset Employee({"id":year-month-duration("P2013Y"), "dur":year-month-duration("P7M"), "name": "Bob"})
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_0/issue_363_temporal_sec_key_0.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_0/issue_363_temporal_sec_key_0.3.query.aql
new file mode 100644
index 0000000..8244c6d
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_0/issue_363_temporal_sec_key_0.3.query.aql
@@ -0,0 +1,13 @@
+/*
+ * Description  : create a dataset using year-month-duration for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+use dataverse test;
+
+for $x in dataset('Employee')
+where $x.dur > year-month-duration("P1Y")
+return $x
+
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_1/issue_363_temporal_sec_key_1.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_1/issue_363_temporal_sec_key_1.1.ddl.aql
new file mode 100644
index 0000000..9697c32
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_1/issue_363_temporal_sec_key_1.1.ddl.aql
@@ -0,0 +1,19 @@
+/*
+ * Description  : create a dataset using datetime for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type Emp as open {
+id:datetime,
+dt:datetime,
+name:string
+}
+
+create dataset Employee(Emp) primary key id;
+create index TestSecondIndex on Employee(dt);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_1/issue_363_temporal_sec_key_1.2.update.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_1/issue_363_temporal_sec_key_1.2.update.aql
new file mode 100644
index 0000000..d66da92
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_1/issue_363_temporal_sec_key_1.2.update.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : create a dataset using datetime for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+use dataverse test;
+
+insert into dataset Employee({"id":datetime("1900-01-01T00:00:00"), "dt":datetime("1900-01-01T00:00:00"), "name": "John"})
+insert into dataset Employee({"id":datetime("2000-01-01T00:00:00"), "dt":datetime("2000-01-01T00:00:00"), "name": "Alex"})
+insert into dataset Employee({"id":datetime("2013-01-01T00:00:00"), "dt":datetime("2013-01-01T00:00:00"), "name": "Bob"})
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_1/issue_363_temporal_sec_key_1.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_1/issue_363_temporal_sec_key_1.3.query.aql
new file mode 100644
index 0000000..eb92aea
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_1/issue_363_temporal_sec_key_1.3.query.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : create a dataset using datetime for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+use dataverse test;
+
+for $x in dataset('Employee')
+where $x.dt > datetime("2007-07-07T07:07:07.777Z")
+return $x
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_2/issue_363_temporal_sec_key_2.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_2/issue_363_temporal_sec_key_2.1.ddl.aql
new file mode 100644
index 0000000..d3c19f4
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_2/issue_363_temporal_sec_key_2.1.ddl.aql
@@ -0,0 +1,19 @@
+/*
+ * Description  : create a dataset using time for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type Emp as open {
+id:time,
+tm:time,
+name:string
+}
+
+create dataset Employee(Emp) primary key id;
+create index TestSecondIndex on Employee(tm);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_2/issue_363_temporal_sec_key_2.2.update.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_2/issue_363_temporal_sec_key_2.2.update.aql
new file mode 100644
index 0000000..6f44d6c
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_2/issue_363_temporal_sec_key_2.2.update.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : create a dataset using time for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+use dataverse test;
+
+insert into dataset Employee({"id":time("03:10:00.493Z"), "tm":time("03:10:00.493Z"), "name": "John"})
+insert into dataset Employee({"id":time("20:37:19+08:00"), "tm":time("20:37:19+08:00"), "name": "Alex"})
+insert into dataset Employee({"id":time("21:39:17.948-04:00"), "tm":time("21:39:17.948-04:00"), "name": "Bob"})
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_2/issue_363_temporal_sec_key_2.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_2/issue_363_temporal_sec_key_2.3.query.aql
new file mode 100644
index 0000000..f0392b1
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_2/issue_363_temporal_sec_key_2.3.query.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : create a dataset using time for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+use dataverse test;
+
+for $x in dataset('Employee')
+where $x.tm > time("07:07:07.777Z")
+return $x
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_3/issue_363_temporal_sec_key_3.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_3/issue_363_temporal_sec_key_3.1.ddl.aql
new file mode 100644
index 0000000..ab88978
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_3/issue_363_temporal_sec_key_3.1.ddl.aql
@@ -0,0 +1,19 @@
+/*
+ * Description  : create a dataset using date for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type Emp as open {
+id:date,
+dt:date,
+name:string
+}
+
+create dataset Employee(Emp) primary key id;
+create index TestSecondIndex on Employee(dt);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_3/issue_363_temporal_sec_key_3.2.update.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_3/issue_363_temporal_sec_key_3.2.update.aql
new file mode 100644
index 0000000..b8d415e
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_3/issue_363_temporal_sec_key_3.2.update.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : create a dataset using date for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+use dataverse test;
+
+insert into dataset Employee({"id":date("2010-01-01"), "dt":date("2010-01-01"), "name": "John"})
+insert into dataset Employee({"id":date("-1912-10-11"), "dt":date("-1912-10-11"), "name": "Alex"})
+insert into dataset Employee({"id":date("0732-02-02"), "dt":date("0732-02-02"), "name": "Bob"})
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_3/issue_363_temporal_sec_key_3.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_3/issue_363_temporal_sec_key_3.3.query.aql
new file mode 100644
index 0000000..128a2a6
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_3/issue_363_temporal_sec_key_3.3.query.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : create a dataset using date for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+use dataverse test;
+
+for $x in dataset('Employee')
+where $x.dt > date("2007-07-07")
+return $x
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_4/issue_363_temporal_sec_key_4.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_4/issue_363_temporal_sec_key_4.1.ddl.aql
new file mode 100644
index 0000000..c6a8836
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_4/issue_363_temporal_sec_key_4.1.ddl.aql
@@ -0,0 +1,19 @@
+/*
+ * Description  : create a dataset using day-time-duration for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type Emp as open {
+id:day-time-duration,
+dur:day-time-duration,
+name:string
+}
+
+create dataset Employee(Emp) primary key id;
+create index TestSecondIndex on Employee(dur);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_4/issue_363_temporal_sec_key_4.2.update.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_4/issue_363_temporal_sec_key_4.2.update.aql
new file mode 100644
index 0000000..d5aafe9
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_4/issue_363_temporal_sec_key_4.2.update.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : create a dataset using day-time-duration for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+use dataverse test;
+
+insert into dataset Employee({"id":day-time-duration("P380DT983M"), "dur":day-time-duration("P380DT983M"), "name": "John"})
+insert into dataset Employee({"id":day-time-duration("-P3829H849.392S"), "dur":day-time-duration("-P3829H849.392S"), "name": "Alex"})
+insert into dataset Employee({"id":day-time-duration("PT93847M0.392S"), "dur":day-time-duration("PT93847M0.392S"), "name": "Bob"})
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_4/issue_363_temporal_sec_key_4.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_4/issue_363_temporal_sec_key_4.3.query.aql
new file mode 100644
index 0000000..dbca351
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_4/issue_363_temporal_sec_key_4.3.query.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : create a dataset using day-time-duration for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+ 
+use dataverse test;
+
+for $x in dataset('Employee')
+where $x.dur > day-time-duration("P350D")
+return $x
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_0/issue363_temporal_sec_key_0.1.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_0/issue363_temporal_sec_key_0.1.adm
new file mode 100644
index 0000000..f1a3b14
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_0/issue363_temporal_sec_key_0.1.adm
@@ -0,0 +1 @@
+{ "id": year-month-duration("-P3Y1M"), "dur": year-month-duration("P5Y"), "name": "Alex" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_1/issue363_temporal_sec_key_1.1.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_1/issue363_temporal_sec_key_1.1.adm
new file mode 100644
index 0000000..1703ccd
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_1/issue363_temporal_sec_key_1.1.adm
@@ -0,0 +1 @@
+{ "id": datetime("2013-01-01T00:00:00.000Z"), "dt": datetime("2013-01-01T00:00:00.000Z"), "name": "Bob" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_2/issue363_temporal_sec_key_2.1.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_2/issue363_temporal_sec_key_2.1.adm
new file mode 100644
index 0000000..1e32e45
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_2/issue363_temporal_sec_key_2.1.adm
@@ -0,0 +1,2 @@
+{ "id": time("12:37:19.000Z"), "tm": time("12:37:19.000Z"), "name": "Alex" }
+{ "id": time("01:39:17.948Z"), "tm": time("01:39:17.948Z"), "name": "Bob" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_3/issue363_temporal_sec_key_3.1.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_3/issue363_temporal_sec_key_3.1.adm
new file mode 100644
index 0000000..fb4a286
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_3/issue363_temporal_sec_key_3.1.adm
@@ -0,0 +1 @@
+{ "id": date("2010-01-01"), "dt": date("2010-01-01"), "name": "John" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_4/issue363_temporal_sec_key_4.1.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_4/issue363_temporal_sec_key_4.1.adm
new file mode 100644
index 0000000..dacaf15
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_4/issue363_temporal_sec_key_4.1.adm
@@ -0,0 +1 @@
+{ "id": day-time-duration("P380DT16H23M"), "dur": day-time-duration("P380DT16H23M"), "name": "John" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/testsuite.xml b/asterix-app/src/test/resources/metadata/testsuite.xml
index 63487d4..e9ed34d 100644
--- a/asterix-app/src/test/resources/metadata/testsuite.xml
+++ b/asterix-app/src/test/resources/metadata/testsuite.xml
@@ -175,6 +175,31 @@
         <output-dir compare="Text">issue_363_temporal_key_4</output-dir>
       </compilation-unit>
     </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="issue_363_temporal_sec_key_0">
+        <output-dir compare="Text">issue_363_temporal_sec_key_0</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="issue_363_temporal_sec_key_1">
+        <output-dir compare="Text">issue_363_temporal_sec_key_1</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="issue_363_temporal_sec_key_2">
+        <output-dir compare="Text">issue_363_temporal_sec_key_2</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="issue_363_temporal_sec_key_3">
+        <output-dir compare="Text">issue_363_temporal_sec_key_3</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="issue_363_temporal_sec_key_4">
+        <output-dir compare="Text">issue_363_temporal_sec_key_4</output-dir>
+      </compilation-unit>
+    </test-case>
   </test-group>
   <test-group name="exception">
     <test-case FilePath="exception">
diff --git a/asterix-app/src/test/resources/optimizerts/results/fj-phase1.plan b/asterix-app/src/test/resources/optimizerts/results/fj-phase1.plan
index 494b208..76fd0bd 100644
--- a/asterix-app/src/test/resources/optimizerts/results/fj-phase1.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/fj-phase1.plan
@@ -11,11 +11,11 @@
                           -- NESTED_TUPLE_SOURCE  |LOCAL|
                     }
               -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                -- STABLE_SORT [$$23(ASC), $$6(ASC)]  |PARTITIONED|
+                -- STABLE_SORT [$$23(ASC), $$4(ASC)]  |PARTITIONED|
                   -- HASH_PARTITION_EXCHANGE [$$23]  |PARTITIONED|
                     -- STREAM_PROJECT  |PARTITIONED|
                       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                        -- HYBRID_HASH_JOIN [$$1][$$6]  |PARTITIONED|
+                        -- HYBRID_HASH_JOIN [$$1][$$4]  |PARTITIONED|
                           -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
                             -- STREAM_PROJECT  |PARTITIONED|
                               -- UNNEST  |PARTITIONED|
@@ -25,7 +25,7 @@
                                       -- DATASOURCE_SCAN  |PARTITIONED|
                                         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                           -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                          -- HASH_PARTITION_EXCHANGE [$$6]  |PARTITIONED|
+                          -- HASH_PARTITION_EXCHANGE [$$4]  |PARTITIONED|
                             -- RUNNING_AGGREGATE  |PARTITIONED|
                               -- STREAM_PROJECT  |PARTITIONED|
                                 -- SORT_MERGE_EXCHANGE [$$24(DESC) ]  |PARTITIONED|
@@ -37,13 +37,13 @@
                                                   -- NESTED_TUPLE_SOURCE  |LOCAL|
                                               }
                                         -- HASH_PARTITION_MERGE_EXCHANGE MERGE:[$$30(ASC)] HASH:[$$30]  |PARTITIONED|
-                                          -- PRE_CLUSTERED_GROUP_BY[$$5]  |PARTITIONED|
+                                          -- PRE_CLUSTERED_GROUP_BY[$$3]  |PARTITIONED|
                                                   {
                                                     -- AGGREGATE  |LOCAL|
                                                       -- NESTED_TUPLE_SOURCE  |LOCAL|
                                                   }
                                             -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                              -- STABLE_SORT [$$5(ASC)]  |PARTITIONED|
+                                              -- STABLE_SORT [$$3(ASC)]  |PARTITIONED|
                                                 -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                   -- STREAM_PROJECT  |PARTITIONED|
                                                     -- UNNEST  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inlined_q18_large_volume_customer.plan b/asterix-app/src/test/resources/optimizerts/results/inlined_q18_large_volume_customer.plan
index 0713f96..0c7b95d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inlined_q18_large_volume_customer.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inlined_q18_large_volume_customer.plan
@@ -28,7 +28,7 @@
                                       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                         -- STREAM_PROJECT  |PARTITIONED|
                                           -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                            -- HYBRID_HASH_JOIN [$$57][$$4]  |PARTITIONED|
+                                            -- HYBRID_HASH_JOIN [$$57][$$3]  |PARTITIONED|
                                               -- HASH_PARTITION_EXCHANGE [$$57]  |PARTITIONED|
                                                 -- STREAM_PROJECT  |PARTITIONED|
                                                   -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
@@ -47,7 +47,7 @@
                                                               -- DATASOURCE_SCAN  |PARTITIONED|
                                                                 -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                   -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                                              -- HASH_PARTITION_EXCHANGE [$$4]  |PARTITIONED|
+                                              -- HASH_PARTITION_EXCHANGE [$$3]  |PARTITIONED|
                                                 -- STREAM_PROJECT  |PARTITIONED|
                                                   -- STREAM_SELECT  |PARTITIONED|
                                                     -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.1.ddl.aql
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.1.ddl.aql
diff --git a/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.2.update.aql
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.2.update.aql
diff --git a/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.3.query.aql
new file mode 100644
index 0000000..6b09eec
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.3.query.aql
@@ -0,0 +1,11 @@
+/*
+ * Description  : This test case is to verify the fix for issue443
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=443
+ * Expected Res : Success
+ * Date         : 22nd May 2013
+ */
+
+
+for $a in [ {"f" : 19, "g": 1} , {"f" : 12, "g": 2} , {"f" : 10, "g": 1} , {"f" : 17, "g": 1}, {"f" : 12, "g": 4} ]
+distinct by $a.f
+return $a
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443/query-issue443.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443/query-issue443.1.ddl.aql
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443/query-issue443.1.ddl.aql
diff --git a/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443/query-issue443.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443/query-issue443.2.update.aql
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443/query-issue443.2.update.aql
diff --git a/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443/query-issue443.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443/query-issue443.3.query.aql
new file mode 100644
index 0000000..602468c
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443/query-issue443.3.query.aql
@@ -0,0 +1,11 @@
+/*
+ * Description  : This test case is to verify the fix for issue443
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=443
+ * Expected Res : Success
+ * Date         : 22nd May 2013
+ */
+
+
+for $a in [ {"f" : 19} , {"f" : 12} , {"f" : 10} , {"f" : 17}, {"f" : 12} ]
+distinct by $a.f
+return $a
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.3.query.aql
index 01ef318..230aa40 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.3.query.aql
@@ -8,4 +8,5 @@
 use dataverse feeds;
 
 for $x in dataset('TweetFeed')
+order by $x.id
 return $x
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.3.query.aql
index 5146fb5..714dd80 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.3.query.aql
@@ -8,4 +8,5 @@
 use dataverse feeds;
 
 for $x in dataset('TweetFeed')
+order by $x.id
 return $x
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.3.query.aql
index d94576b..48e18e2 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.3.query.aql
@@ -7,4 +7,5 @@
 use dataverse feeds;
 
 for $x in dataset('TweetFeed')
+order by $x.id
 return $x
diff --git a/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.3.query.aql
index efa1f9c..f1127f0 100644
--- a/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.3.query.aql
@@ -9,5 +9,6 @@
 use dataverse feeds;
 
 for $x in dataset('TweetFeed')
+order by $x.id
 return $x
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue377/query-issue377.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue377/query-issue377.3.query.aql
index 81d6cf6..02ff97d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue377/query-issue377.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue377/query-issue377.3.query.aql
@@ -11,6 +11,7 @@
 set simthreshold "3";
 
 for $fbu in dataset FacebookUsers
+order by $fbu.id
 return {
     "id": $fbu.id,
     "name": $fbu.name,
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue423/query-issue423.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue423/query-issue423.1.ddl.aql
new file mode 100644
index 0000000..05eb126
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue423/query-issue423.1.ddl.aql
@@ -0,0 +1,8 @@
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type TestType as open { id : int32 ,fname:string, lname:string}
+
+create dataset t2(TestType) primary key fname,lname;
+create dataset t1(TestType) primary key fname,lname;
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue423/query-issue423.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue423/query-issue423.2.update.aql
new file mode 100644
index 0000000..71904f1
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue423/query-issue423.2.update.aql
@@ -0,0 +1,6 @@
+use dataverse test;
+
+insert into dataset t1({"id":123,"fname":"John","lname":"Doe"});
+insert into dataset t1({"id":122,"fname":"Bruce","lname":"Li"});
+insert into dataset t2({"id":23,"fname":"John","lname":"Doe"});
+insert into dataset t2({"id":24,"fname":"Ravi","lname":"Khanna"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue423/query-issue423.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue423/query-issue423.3.query.aql
new file mode 100644
index 0000000..a07b185
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue423/query-issue423.3.query.aql
@@ -0,0 +1,6 @@
+use dataverse test;
+
+for $l in dataset t1
+for $m in dataset t2
+	where $l.name=$m.name
+return {"l":$l,"m":$m};
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue442/query-issue442.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue442/query-issue442.1.ddl.aql
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue442/query-issue442.1.ddl.aql
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue442/query-issue442.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue442/query-issue442.2.update.aql
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue442/query-issue442.2.update.aql
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue442/query-issue442.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue442/query-issue442.3.query.aql
new file mode 100644
index 0000000..4da91be
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue442/query-issue442.3.query.aql
@@ -0,0 +1,10 @@
+/*
+ * Description  : This test case is to verify the fix for issue442
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=442
+ * Expected Res : Fail
+ * Date         : 22nd May 2013
+ */
+
+for $a in [ {"f" : 100} ,  {"f" : 0},  {"f" : -1}, {"f" : null}, {"f" : 999999}, {"f" : 1} , {"f" : "zzzzz"}]
+order by $a.f desc
+return $a
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/spatial/create-rtree-index/create-rtree-index.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/spatial/create-rtree-index/create-rtree-index.3.query.aql
index 53b1c30..515acc0 100644
--- a/asterix-app/src/test/resources/runtimets/queries/spatial/create-rtree-index/create-rtree-index.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/spatial/create-rtree-index/create-rtree-index.3.query.aql
@@ -6,4 +6,5 @@
 use dataverse test;
 
 for $a in dataset('MyData')
+order by $a.id
 return $a.id
diff --git a/asterix-app/src/test/resources/runtimets/results/distinct/query-issue443-2/query-issue443.1.adm b/asterix-app/src/test/resources/runtimets/results/distinct/query-issue443-2/query-issue443.1.adm
new file mode 100644
index 0000000..1a6e796
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/distinct/query-issue443-2/query-issue443.1.adm
@@ -0,0 +1,4 @@
+{ "f": 10, "g": 1 }
+{ "f": 12, "g": 4 }
+{ "f": 17, "g": 1 }
+{ "f": 19, "g": 1 }
diff --git a/asterix-app/src/test/resources/runtimets/results/distinct/query-issue443/query-issue443.1.adm b/asterix-app/src/test/resources/runtimets/results/distinct/query-issue443/query-issue443.1.adm
new file mode 100644
index 0000000..a4a6e48
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/distinct/query-issue443/query-issue443.1.adm
@@ -0,0 +1,4 @@
+{ "f": 10 }
+{ "f": 12 }
+{ "f": 17 }
+{ "f": 19 }
diff --git a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue377/query-issue377.1.adm b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue377/query-issue377.1.adm
index c3bb80f..1a7caf5 100644
--- a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue377/query-issue377.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue377/query-issue377.1.adm
@@ -1,23 +1,18 @@
 { "id": 9142198, "similar-users": [  ], "name": "SherryFea" }
 { "id": 9313492, "similar-users": [  ], "name": "TeraWolfe" }
 { "id": 9478720, "similar-users": [  ], "name": "AngeliaKettlewell" }
-{ "id": 10001080, "similar-users": [  ], "name": "GarrettBode" }
-{ "id": 10179538, "similar-users": [  ], "name": "OrlandoBaxter" }
-{ "id": 10307032, "similar-users": [  ], "name": "QuentinSauter" }
-{ "id": 10394488, "similar-users": [  ], "name": "OswaldRay" }
-{ "id": 10423588, "similar-users": [  ], "name": "ShirleneRuch" }
-{ "id": 10495420, "similar-users": [  ], "name": "WendyMcloskey" }
-{ "id": 11307946, "similar-users": [  ], "name": "HelgaStough" }
-{ "id": 11447332, "similar-users": [  ], "name": "SherisseMaugham" }
-{ "id": 11570326, "similar-users": [  ], "name": "LindenFilby" }
-{ "id": 11951098, "similar-users": [  ], "name": "TeraByers" }
-{ "id": 11954992, "similar-users": [  ], "name": "CaitlinLangston" }
 { "id": 9510451, "similar-users": [  ], "name": "ChuckFinck" }
 { "id": 9594523, "similar-users": [  ], "name": "TamWillcox" }
 { "id": 9629395, "similar-users": [  ], "name": "JuliusWire" }
 { "id": 9988417, "similar-users": [  ], "name": "ColineLane" }
+{ "id": 10001080, "similar-users": [  ], "name": "GarrettBode" }
+{ "id": 10179538, "similar-users": [  ], "name": "OrlandoBaxter" }
 { "id": 10272571, "similar-users": [  ], "name": "JarrettGoldvogel" }
+{ "id": 10307032, "similar-users": [  ], "name": "QuentinSauter" }
 { "id": 10361965, "similar-users": [  ], "name": "ArlenFlick" }
+{ "id": 10394488, "similar-users": [  ], "name": "OswaldRay" }
+{ "id": 10423588, "similar-users": [  ], "name": "ShirleneRuch" }
+{ "id": 10495420, "similar-users": [  ], "name": "WendyMcloskey" }
 { "id": 10498285, "similar-users": [  ], "name": "KileyBridger" }
 { "id": 10733617, "similar-users": [  ], "name": "LeonardoKight" }
 { "id": 10874791, "similar-users": [  ], "name": "HaydeeGarratt" }
@@ -25,5 +20,10 @@
 { "id": 11061631, "similar-users": [  ], "name": "MaxeneKellogg" }
 { "id": 11068231, "similar-users": [  ], "name": "DinahSwink" }
 { "id": 11140213, "similar-users": [  ], "name": "MontgomeryWhittier" }
+{ "id": 11307946, "similar-users": [  ], "name": "HelgaStough" }
 { "id": 11381089, "similar-users": [  ], "name": "EarleneAmmons" }
+{ "id": 11447332, "similar-users": [  ], "name": "SherisseMaugham" }
+{ "id": 11570326, "similar-users": [  ], "name": "LindenFilby" }
 { "id": 11675221, "similar-users": [  ], "name": "CalantheGearhart" }
+{ "id": 11951098, "similar-users": [  ], "name": "TeraByers" }
+{ "id": 11954992, "similar-users": [  ], "name": "CaitlinLangston" }
diff --git a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue423/query-issue423.1.adm b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue423/query-issue423.1.adm
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue423/query-issue423.1.adm
diff --git a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue442/query-issue442.1.adm b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue442/query-issue442.1.adm
new file mode 100644
index 0000000..e34c554
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue442/query-issue442.1.adm
@@ -0,0 +1,7 @@
+{ "f": "zzzzz" }
+{ "f": 999999 }
+{ "f": 100 }
+{ "f": 1 }
+{ "f": 0 }
+{ "f": -1 }
+{ "f": null }
diff --git a/asterix-app/src/test/resources/runtimets/results/spatial/create-rtree-index/create-rtree-index.1.adm b/asterix-app/src/test/resources/runtimets/results/spatial/create-rtree-index/create-rtree-index.1.adm
index b9c6dc7..d4de868 100644
--- a/asterix-app/src/test/resources/runtimets/results/spatial/create-rtree-index/create-rtree-index.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/spatial/create-rtree-index/create-rtree-index.1.adm
@@ -1,21 +1,21 @@
-2
-4
-6
-8
-10
-12
-14
-16
-18
-20
 1
+2
 3
+4
 5
+6
 7
+8
 9
+10
 11
+12
 13
+14
 15
+16
 17
+18
 19
+20
 21
diff --git a/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterix-app/src/test/resources/runtimets/testsuite.xml
index 956c76d..9e6c308 100644
--- a/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -2805,6 +2805,12 @@
         <output-dir compare="Text">query-issue258</output-dir>
       </compilation-unit>
     </test-case>
+    <test-case FilePath="open-closed">
+      <compilation-unit name="query-issue442">
+        <output-dir compare="Text">query-issue442</output-dir>
+        <expected-error>edu.uci.ics.asterix.common.exceptions.AsterixException</expected-error>
+      </compilation-unit>
+    </test-case>
   </test-group>
   <test-group name="quantifiers">
     <test-case FilePath="quantifiers">
@@ -4349,4 +4355,16 @@
       </compilation-unit>
     </test-case>
   </test-group>
+  <test-group name="distinct">
+  	<test-case FilePath="distinct">
+      <compilation-unit name="query-issue443">
+        <output-dir compare="Text">query-issue443</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="distinct">
+      <compilation-unit name="query-issue443-2">
+        <output-dir compare="Text">query-issue443-2</output-dir>
+      </compilation-unit>
+    </test-case>
+  </test-group>
 </test-suite>
diff --git a/asterix-aql/src/main/javacc/AQL.jj b/asterix-aql/src/main/javacc/AQL.jj
index d593d41..a19a15f 100644
--- a/asterix-aql/src/main/javacc/AQL.jj
+++ b/asterix-aql/src/main/javacc/AQL.jj
@@ -1820,21 +1820,14 @@
 	extendCurrentScope();
 }
 {
-    "for" varExp = Variable()
-    {
-     	getCurrentScope().addNewVarSymbolToScope(varExp.getVar());
-	}
-	("at" varPos = Variable()
-	  {
-	     getCurrentScope().addNewVarSymbolToScope(varPos.getVar());
-	  } 
-	 )? 
-      "in" ( inExp = Expression() )
+    "for" varExp = Variable() ("at" varPos = Variable())?  "in" ( inExp = Expression() )
     {
       fc.setVarExpr(varExp);
+      getCurrentScope().addNewVarSymbolToScope(varExp.getVar());
       fc.setInExpr(inExp);
       if (varPos != null) {
         fc.setPosExpr(varPos);
+	    getCurrentScope().addNewVarSymbolToScope(varPos.getVar());
       }
       return fc;
     }
diff --git a/asterix-doc/src/site/markdown/AccessingExternalDataInAsterixDB.md b/asterix-doc/src/site/markdown/AccessingExternalDataInAsterixDB.md
index 7e49a0f..7319094 100644
--- a/asterix-doc/src/site/markdown/AccessingExternalDataInAsterixDB.md
+++ b/asterix-doc/src/site/markdown/AccessingExternalDataInAsterixDB.md
@@ -14,7 +14,7 @@
 _For constructing an example, we assume a single machine setup._
 
 Similar to a regular dataset, an external dataset has an associated datatype.  We shall first create the datatype associated with each record in Lineitem data.
-Paste the following in the query textbox on the webpage at http://127.0.0.1 and hit 'Execute'.
+Paste the following in the query textbox on the webpage at http://127.0.0.1:19001 and hit 'Execute'.
 
 
         create dataverse ExternalFileDemo;
@@ -100,7 +100,7 @@
         127.0.0.1:///home/joe/lineitem.tbl.
 
 
-In your web-browser, navigate to 127.0.0.1 and paste the above to the query text box. Finally hit 'Execute'.
+In your web-browser, navigate to 127.0.0.1:19001 and paste the above to the query text box. Finally hit 'Execute'.
 
 Next we move over to the the section [Writing Queries against an External Dataset](#Writing_Queries_against_an_External_Dataset) and try a sample query against the external dataset.
 
diff --git a/asterix-doc/src/site/markdown/AsterixQueryLanguageReference.md b/asterix-doc/src/site/markdown/AsterixQueryLanguageReference.md
index c9ed3e2..1ed876e 100644
--- a/asterix-doc/src/site/markdown/AsterixQueryLanguageReference.md
+++ b/asterix-doc/src/site/markdown/AsterixQueryLanguageReference.md
@@ -3,38 +3,79 @@
 
 This document provides an overview of the Asterix Query language.
 
+
 ## 2. Expressions
 
     Expression ::= ( OperatorExpr | IfThenElse | FLWOGR | QuantifiedExpression )
 
+
 ### Primary Expressions
 
-    PrimaryExpr ::= Literal | VariableRef | ParenthesizedExpression | FunctionCallExpr
-                  | DatasetAccessExpression | ListConstructor | RecordConstructor
+    PrimaryExpr ::= Literal
+                  | VariableRef
+                  | ParenthesizedExpression
+                  | FunctionCallExpr
+                  | DatasetAccessExpression
+                  | ListConstructor
+                  | RecordConstructor
+                  
 
 #### Literals
 
-    Literal ::= StringLiteral | <INTEGER_LITERAL> | <FLOAT_LITERAL> | <DOUBLE_LITERAL> | <NULL> | <TRUE> | <FALSE>
+    Literal ::= StringLiteral
+              | <INTEGER_LITERAL>
+              | <FLOAT_LITERAL>
+              | <DOUBLE_LITERAL>
+              | "null"
+              | "true"
+              | "false"
     StringLiteral ::= <STRING_LITERAL>
 
+##### Examples
+
+    "a string"
+    42
+
+
 #### Variable References
 
     VariableRef ::= <VARIABLE>
+
+##### Example
+
+    $id  
     
+
 #### Parenthesized Expressions
     
-    ParenthesizedExpression ::= <LEFTPAREN> Expression <RIGHTPAREN>
+    ParenthesizedExpression ::= "(" Expression ")"
+
+##### Example
+
+    ( 1 + 1 )
+
 
 #### Function Calls
 
-    FunctionCallExpr ::= FunctionOrTypeName <LEFTPAREN> ( Expression ( "," Expression )* )? <RIGHTPAREN>
-    
+    FunctionCallExpr ::= FunctionOrTypeName "(" ( Expression ( "," Expression )* )? ")"
+
+##### Example
+
+    string-length("a string")
+
+
 #### Dataset Access
 
-    DatasetAccessExpression ::= <DATASET> ( ( Identifier ( "." Identifier )? )
-                              | ( <LEFTPAREN> Expression ( "," Expression )* <RIGHTPAREN> ) )
+    DatasetAccessExpression ::= "dataset" ( ( Identifier ( "." Identifier )? )
+                              | ( "(" Expression ")" ) )
     Identifier              ::= <IDENTIFIER> | StringLiteral
 
+##### Examples
+
+    dataset customers
+    dataset (string-join("customers", $country))
+    
+
 #### Constructors
 
     ListConstructor          ::= ( OrderedListConstructor | UnorderedListConstructor )
@@ -43,20 +84,51 @@
     RecordConstructor        ::= "{" ( FieldBinding ( "," FieldBinding )* )? "}"
     FieldBinding             ::= Expression ":" Expression
 
+##### Examples
+
+    [ "a", "b", "c" ]
+    
+    {{ 42, "forty-two", "AsterixDB!" }}
+    
+    {
+      "project name"    : "AsterixDB",
+      "project members" : {{ "vinayakb", "dtabass", "chenli" }}
+    } 
+
+
 ### Path Expressions
 
     ValueExpr ::= PrimaryExpr ( Field | Index )*
     Field     ::= "." Identifier
     Index     ::= "[" ( Expression | "?" ) "]"
 
+##### Examples
+
+    { "list" : [ "a", "b", "c"] }.list
+    
+    [ "a", "b", "c"][2]
+    
+    { "list" : [ "a", "b", "c"] }.list[2]
+
+
 ### Logical Expressions
 
     OperatorExpr ::= AndExpr ( "or" AndExpr )*
     AndExpr      ::= RelExpr ( "and" RelExpr )*
     
+##### Example
+
+    $a > 3 and $a < 5
+    
+
 ### Comparison Expressions
 
     RelExpr ::= AddExpr ( ( "<" | ">" | "<=" | ">=" | "=" | "!=" | "~=" ) AddExpr )?
+    
+##### Example
+
+    5 > 3
+
 
 ### Arithmetic Expressions
 
@@ -64,6 +136,11 @@
     MultExpr ::= UnaryExpr ( ( "*" | "/" | "%" | <CARET> | "idiv" ) UnaryExpr )*
     UnaryExpr ::= ( ( "+" | "-" ) )? ValueExpr
 
+##### Example
+
+    3 ^ 2 + 4 ^ 2
+
+
 ###  FLWOGR Expression   
     
     FLWOGR         ::= ( ForClause | LetClause ) ( Clause )* "return" Expression
@@ -81,16 +158,99 @@
     Variable       ::= <VARIABLE>
 
 
+##### Example
+
+    for $user in dataset FacebookUsers
+    where $user.id = 8
+    return $user
+    
+##### Example
+
+    for $user in dataset FacebookUsers
+    for $message in dataset FacebookMessages
+    where $message.author-id = $user.id
+    return
+      {
+        "uname": $user.name,
+        "message": $message.message
+      }; 
+    
+##### Example
+
+    for $user in dataset FacebookUsers
+    let $messages := 
+      for $message in dataset FacebookMessages
+      where $message.author-id = $user.id
+      return $message.message
+    return
+      {
+        "uname": $user.name,
+        "messages": $messages
+      }; 
+      
+##### Example
+      
+    for $user in dataset TwitterUsers
+    order by $user.followers_count desc, $user.lang asc
+    return $user
+
+* when ordering, null is considered smaller than any other value
+
+##### Example
+
+    for $x in dataset FacebookMessages
+    let $messages := $x.message
+    group by $loc := $x.sender-location with $messages
+    return
+      {
+        "location" : $loc,
+        "message" : $messages
+      }
+
+* after the group by clause, only variables that appear in the group-by list or in the with list are in scope
+* after the group by clause, each variable in the with list is bound to a collection of items (all the values that the variable was bound to in the tuples that make up the group)
+* null is handled as a single value for grouping
+
+##### Example
+
+    for $user in dataset TwitterUsers
+    order by $user.followers_count desc
+    limit 2
+    return $user
+
+##### Example (currently not working)
+    
+    for $x in dataset FacebookMessages
+    distinct by $x.sender-location
+    return
+      {
+        "location" : $x.sender-location,
+        "message" : $x.message
+      }
+
+* every variable that is in scope before the distinct clause is also in scope after the distinct clause
+* distinct by works much like group by, but for each variable that contains more than one value after the distinct-by clause, one value is picked non-deterministically
+* if a variable is in the distinct-by list, its value is deterministic
+* null is treated as a single value
+    
 ### Conditional Expression
     
-    IfThenElse ::= "if" <LEFTPAREN> Expression <RIGHTPAREN> "then" Expression "else" Expression
+    IfThenElse ::= "if" "(" Expression ")" "then" Expression "else" Expression
+
+##### Example
+
+    if (2 < 3) then "yes" else "no"
 
 
 ### Quantified Expressions
     
     QuantifiedExpression ::= ( ( "some" ) | ( "every" ) ) Variable "in" Expression 
                              ( "," Variable "in" Expression )* "satisfies" Expression
+                             
+##### Examples
 
+    every $x in [ 1, 2, 3] satisfies $x < 3
+    some $x in [ 1, 2, 3] satisfies $x < 3
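+
+##### Example
+
+A sketch of the multi-variable form allowed by the rule above; the lists are illustrative:
+
+    some $x in [ 1, 2 ], $y in [ 3, 4 ] satisfies $x < $y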
 
 ## 3. Statements
 
@@ -103,7 +263,6 @@
                       | SetStatement
                       | InsertStatement
                       | DeleteStatement
-                      | FeedStatement
                       | Query
     
 ### Declarations    
@@ -111,21 +270,48 @@
     DataverseDeclaration ::= "use" "dataverse" Identifier
     SetStatement         ::= "set" Identifier StringLiteral
     FunctionDeclaration  ::= "declare" "function" Identifier ParameterList "{" Expression "}"
-    ParameterList        ::= <LEFTPAREN> ( <VARIABLE> ( "," <VARIABLE> )* )? <RIGHTPAREN>
+    ParameterList        ::= "(" ( <VARIABLE> ( "," <VARIABLE> )* )? ")"
+
+##### Example
+
+    use dataverse TinySocial;
+    
+##### Example
+
+    set simfunction "jaccard";
+    set simthreshold "0.6f"; 
+
+##### Example
+    
+    declare function add($a, $b) {
+      $a + $b
+    };
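+
+##### Example
+
+A sketch of invoking the declared function in the same request; the literal arguments are illustrative:
+
+    add(1, 2)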
 
 ### Lifecycle Management Statements
 
-    CreateStatement ::= "create" ( TypeSpecification | DatasetSpecification | IndexSpecification | DataverseSpecification | FunctionSpecification )
+    CreateStatement ::= "create" ( DataverseSpecification
+                                 | TypeSpecification
+                                 | DatasetSpecification
+                                 | IndexSpecification
+                                 | FunctionSpecification )
 
-    DropStatement       ::= "drop" ( <DATASET> QualifiedName IfExists
-                                   | "index" DoubleQualifiedName IfExists
-                                   | "type" FunctionOrTypeName IfExists
-                                   | "dataverse" Identifier IfExists
-                                   | "function" FunctionSignature IfExists )
-    IfExists            ::= ( "if" "exists" )?
     QualifiedName       ::= Identifier ( "." Identifier )?
     DoubleQualifiedName ::= Identifier "." Identifier ( "." Identifier )?
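+
+##### Examples
+
+Sketches of a qualified and a double-qualified name, reusing names from the TinySocial examples below; the combinations are illustrative:
+
+    TinySocial.FacebookUsers
+    TinySocial.FacebookMessages.fbSenderLocIndex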
 
+#### Dataverses
+
+    DataverseSpecification ::= "dataverse" Identifier IfNotExists ( "with format" StringLiteral )?
+    
+
+##### Example
+
+    create dataverse TinySocial;
+
 #### Types
 
     TypeSpecification    ::= "type" FunctionOrTypeName IfNotExists "as" TypeExpr
@@ -137,55 +323,140 @@
     TypeReference        ::= Identifier
     OrderedListTypeDef   ::= "[" ( TypeExpr ) "]"
     UnorderedListTypeDef ::= "{{" ( TypeExpr ) "}}"
-    
+
+##### Example
+
+    create type FacebookUserType as closed {
+      id: int32,
+      alias: string,
+      name: string,
+      user-since: datetime,
+      friend-ids: {{ int32 }},
+      employment: [EmploymentType]
+    };
+
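+##### Example
+
+A sketch of an open type, which admits fields beyond those listed; the type name and field are illustrative:
+
+    create type TwitterUserType as open {
+      screen-name: string
+    };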
+
 #### Datasets
 
-    DatasetSpecification ::= "external" <DATASET> QualifiedName <LEFTPAREN> Identifier <RIGHTPAREN> IfNotExists 
-                                 "using" AdapterName Configuration ( "hints" Properties )? 
-                           | "feed" <DATASET> QualifiedName <LEFTPAREN> Identifier <RIGHTPAREN> IfNotExists
-                                 "using" AdapterName Configuration ( ApplyFunction )? PrimaryKey ( "on" Identifier )? ( "hints" Properties )? 
-                           | "internal"? <DATASET> QualifiedName <LEFTPAREN> Identifier <RIGHTPAREN> IfNotExists
-                             PrimaryKey ( "on" Identifier )? ( "hints" Properties )?
+    DatasetSpecification ::= "internal"? "dataset" QualifiedName "(" Identifier ")" IfNotExists
+                             PrimaryKey ( "on" Identifier )? ( "hints" Properties )? 
+                           | "external" "dataset" QualifiedName "(" Identifier ")" IfNotExists 
+                             "using" AdapterName Configuration ( "hints" Properties )?
     AdapterName          ::= Identifier
-    Configuration        ::= <LEFTPAREN> ( KeyValuePair ( "," KeyValuePair )* )? <RIGHTPAREN>
-    KeyValuePair         ::= <LEFTPAREN> StringLiteral "=" StringLiteral <RIGHTPAREN>
-    Properties           ::= ( <LEFTPAREN> Property ( "," Property )* <RIGHTPAREN> )?
+    Configuration        ::= "(" ( KeyValuePair ( "," KeyValuePair )* )? ")"
+    KeyValuePair         ::= "(" StringLiteral "=" StringLiteral ")"
+    Properties           ::= ( "(" Property ( "," Property )* ")" )?
     Property             ::= Identifier "=" ( StringLiteral | <INTEGER_LITERAL> )
     ApplyFunction        ::= "apply" "function" FunctionSignature
     FunctionSignature    ::= FunctionOrTypeName "@" <INTEGER_LITERAL>
     PrimaryKey           ::= "primary" "key" Identifier ( "," Identifier )*
 
+
+##### Example
+
+    create internal dataset FacebookUsers(FacebookUserType) primary key id;
+
+##### Example
+
+    create external dataset Lineitem(LineitemType) using localfs (
+      ("path"="127.0.0.1://SOURCE_PATH"),
+      ("format"="delimited-text"),
+      ("delimiter"="|"));
+      
 #### Indices
 
-    IndexSpecification ::= "index" Identifier IfNotExists "on" QualifiedName <LEFTPAREN> ( Identifier ) ( "," Identifier )* <RIGHTPAREN> ( "type" IndexType )?
-    IndexType          ::= "btree" | "rtree" | "keyword" | "fuzzy keyword" | "ngram" <LEFTPAREN> <INTEGER_LITERAL> <RIGHTPAREN> | "fuzzy ngram" <LEFTPAREN> <INTEGER_LITERAL> <RIGHTPAREN>
+    IndexSpecification ::= "index" Identifier IfNotExists "on" QualifiedName 
+                           "(" ( Identifier ) ( "," Identifier )* ")" ( "type" IndexType )?
+    IndexType          ::= "btree"
+                         | "rtree"
+                         | "keyword"
+                         | "fuzzy keyword"
+                         | "ngram" "(" <INTEGER_LITERAL> ")"
+                         | "fuzzy ngram" "(" <INTEGER_LITERAL> ")"
 
-#### Dataverses
+##### Example
 
-    DataverseSpecification ::= "dataverse" Identifier IfNotExists ( "with format" StringLiteral )?
+    create index fbAuthorIdx on FacebookMessages(author-id) type btree;
+
+##### Example
+
+    create index fbSenderLocIndex on FacebookMessages(sender-location) type rtree;
+
+##### Example
+
+    create index fbMessageIdx on FacebookMessages(message) type keyword;
+
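+##### Example
+
+A sketch of an n-gram index following the "ngram" alternative above; the index name and gram length 3 are illustrative:
+
+    create index fbMessageNgramIdx on FacebookMessages(message) type ngram(3);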
 
 #### Functions
 
-    FunctionSpecification ::= "function" FunctionOrTypeName IfNotExists ParameterList "{" Expression "}"    
+    FunctionSpecification ::= "function" FunctionOrTypeName IfNotExists ParameterList "{" Expression "}"
+    
+##### Example
+    
+    create function add($a, $b) {
+      $a + $b
+    };
+    
+
+#### Removal
+
+    DropStatement       ::= "drop" ( "dataverse" Identifier IfExists
+                                   | "type" FunctionOrTypeName IfExists
+                                   | "dataset" QualifiedName IfExists
+                                   | "index" DoubleQualifiedName IfExists
+                                   | "function" FunctionSignature IfExists )
+    IfExists            ::= ( "if" "exists" )?
+    
+##### Example
+
+    drop dataset FacebookUsers if exists;
+
+##### Example
+
+    drop index fbSenderLocIndex;
+
+##### Example
+
+    drop type FacebookUserType;
+    
+##### Example
+
+    drop dataverse TinySocial;
+
+##### Example
+
+    drop function add;
+    
 
 ### Import/Export Statements
 
-    LoadStatement  ::= "load" <DATASET> QualifiedName "using" AdapterName Configuration ( "pre-sorted" )?
+    LoadStatement  ::= "load" "dataset" QualifiedName "using" AdapterName Configuration ( "pre-sorted" )?
+    
+##### Example
+
+    load dataset FacebookUsers using localfs
+    (("path"="localhost:///Users/zuck/AsterixDB/load/fbu.adm"),("format"="adm"));
+
 
 ### Modification Statements
 
-    InsertStatement ::= "insert" "into" <DATASET> QualifiedName Query
-    DeleteStatement ::= "delete" Variable "from" <DATASET> QualifiedName ( "where" Expression )?
+    InsertStatement ::= "insert" "into" "dataset" QualifiedName Query
+    DeleteStatement ::= "delete" Variable "from" "dataset" QualifiedName ( "where" Expression )?
+    
+##### Example
 
-### Feed Management Statements
+    insert into dataset UsersCopy (for $user in dataset FacebookUsers return $user);
 
-    FeedStatement ::= "begin" "feed" QualifiedName
-                    | "suspend" "feed" QualifiedName
-                    | "resume" "feed" QualifiedName
-                    | "end" "feed" QualifiedName
-                    | "alter" "feed" QualifiedName "set" Configuration
+##### Example
+    
+    delete $user from dataset FacebookUsers where $user.id = 8;
+    
 
 ### Queries
 
     Query ::= Expression
     
+##### Example
+    
+    for $praise in {{ "great", "brilliant", "awesome" }}
+    return
+       string-concat(["AsterixDB is ", $praise])
diff --git a/asterix-doc/src/site/markdown/index.md b/asterix-doc/src/site/markdown/index.md
new file mode 100644
index 0000000..cba2fdc
--- /dev/null
+++ b/asterix-doc/src/site/markdown/index.md
@@ -0,0 +1 @@
+# AsterixDB
diff --git a/asterix-doc/src/site/site.xml b/asterix-doc/src/site/site.xml
new file mode 100644
index 0000000..6724153
--- /dev/null
+++ b/asterix-doc/src/site/site.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+
+<project name="AsterixDB" xmlns="http://maven.apache.org/DECORATION/1.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/DECORATION/1.0.0 http://maven.apache.org/xsd/decoration-1.0.0.xsd">
+  <!--
+  <bannerLeft>
+    <name>Asterix</name>
+    <src>http://asterix.ics.uci.edu/pic/img9.jpg</src>
+    <href>http://asterix.ics.uci.edu/</href>
+  </bannerLeft>
+  -->
+
+  <skin>
+    <groupId>org.apache.maven.skins</groupId>
+    <artifactId>maven-fluido-skin</artifactId>
+    <version>1.2.1</version>
+  </skin>
+  <custom>
+    <fluidoSkin>
+      <sideBarEnabled>true</sideBarEnabled>
+      <!-- <googlePlusOne /> -->
+    </fluidoSkin>
+  </custom>
+
+  <body>
+    <links>
+      <item name="Home" href="index.html"/>
+    </links>
+
+    <menu name="Documentation">
+      <item name="AsterixDB: A Big Data Management System" href="AsterixAlphaRelease.html"/>
+      <item name="Installing Asterix using Managix" href="InstallingAsterixUsingManagix.html"/>
+      <item name="AsterixDB 101: An ADM and AQL Primer" href="AdmAql101.html"/>
+      <item name="Asterix Data Model (ADM)" href="AsterixDBDataModel.html"/>
+      <item name="AsterixDB Functions" href="AsterixDBFunctions.html"/>
+      <item name="The Asterix Query Language" href="AsterixQueryLanguageReference.html"/>
+      <item name="AsterixDB Support of Similarity Queries" href="AsterixSimilarityQueries.html"/>
+      <item name="Accessing External Data in AsterixDB" href="AccessingExternalDataInAsterixDB.html"/>
+      <item name="REST API to AsterixDB" href="AsterixDBRestAPI.html"/>
+      <item name="(old AQL doc)" href="AsterixQueryLanguage.html"/>      
+    </menu>
+
+    <menu ref="reports"/>
+  </body>
+</project>
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlDataSource.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlDataSource.java
index e4c7ba2..0ed3c78 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlDataSource.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlDataSource.java
@@ -17,7 +17,6 @@
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
@@ -28,6 +27,7 @@
 import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.ListSet;
 import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
 import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;
 import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourcePropertiesProvider;
@@ -219,7 +219,7 @@
                     if (n < 2) {
                         pp = new RandomPartitioningProperty(domain);
                     } else {
-                        Set<LogicalVariable> pvars = new HashSet<LogicalVariable>();
+                        Set<LogicalVariable> pvars = new ListSet<LogicalVariable>();
                         int i = 0;
                         for (LogicalVariable v : scanVariables) {
                             pvars.add(v);
@@ -239,7 +239,7 @@
                     if (n < 2) {
                         pp = new RandomPartitioningProperty(domain);
                     } else {
-                        Set<LogicalVariable> pvars = new HashSet<LogicalVariable>();
+                        Set<LogicalVariable> pvars = new ListSet<LogicalVariable>();
                         int i = 0;
                         for (LogicalVariable v : scanVariables) {
                             pvars.add(v);
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
index 865ab94..f795b17 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
@@ -62,8 +62,13 @@
                         return 1;
                 }
 
-                ATypeTag tag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(b1[s1]);
-                switch (tag) {
+                ATypeTag tag1 = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(b1[s1]);
+                ATypeTag tag2 = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(b2[s2]);
+                if (tag1 != tag2) {
+                    throw new IllegalStateException("The values of two inconsistent types (" + tag1 + " and " + tag2
+                            + ") cannot be compared!");
+                }
+                switch (tag1) {
                     case BOOLEAN: {
                         return ascBoolComp.compare(b1, s1 + 1, l1 - 1, b2, s2 + 1, l2 - 1);
                     }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/AObjectBinaryHashFunctionFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/AObjectBinaryHashFunctionFactory.java
index 495b41f..0946f1b 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/AObjectBinaryHashFunctionFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/AObjectBinaryHashFunctionFactory.java
@@ -1,14 +1,8 @@
 package edu.uci.ics.asterix.dataflow.data.nontagged.hash;
 
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.EnumDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
 import edu.uci.ics.hyracks.data.std.accessors.MurmurHash3BinaryHashFunctionFamily;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
-import edu.uci.ics.hyracks.data.std.primitive.FloatPointable;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
 
 public class AObjectBinaryHashFunctionFactory implements IBinaryHashFunctionFactory {
 
@@ -22,56 +16,12 @@
     @Override
     public IBinaryHashFunction createBinaryHashFunction() {
         return new IBinaryHashFunction() {
-
-            private IBinaryHashFunction boolHash = BooleanBinaryHashFunctionFactory.INSTANCE.createBinaryHashFunction();
-            private IBinaryHashFunction intHash = new PointableBinaryHashFunctionFactory(IntegerPointable.FACTORY)
-                    .createBinaryHashFunction();
-            private IBinaryHashFunction longHash = LongBinaryHashFunctionFactory.INSTANCE.createBinaryHashFunction();
-            private IBinaryHashFunction floatHash = new PointableBinaryHashFunctionFactory(FloatPointable.FACTORY)
-                    .createBinaryHashFunction();
-            private IBinaryHashFunction stringHash = new PointableBinaryHashFunctionFactory(UTF8StringPointable.FACTORY)
-                    .createBinaryHashFunction();
-
-            private IBinaryHashFunction doubleHash = DoubleBinaryHashFunctionFactory.INSTANCE
-                    .createBinaryHashFunction();
-
             private IBinaryHashFunction genericBinaryHash = MurmurHash3BinaryHashFunctionFamily.INSTANCE
                     .createBinaryHashFunction(0);
 
             @Override
             public int hash(byte[] bytes, int offset, int length) {
-                ATypeTag tag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes[offset]);
-                switch (tag) {
-                    case BOOLEAN: {
-                        return boolHash.hash(bytes, offset + 1, length - 1);
-                    }
-                    case TIME:
-                    case DATE:
-                    case YEARMONTHDURATION:
-                    case INT32: {
-                        return intHash.hash(bytes, offset + 1, length - 1);
-                    }
-                    case DATETIME:
-                    case DAYTIMEDURATION:
-                    case INT64: {
-                        return longHash.hash(bytes, offset + 1, length - 1);
-                    }
-                    case FLOAT: {
-                        return floatHash.hash(bytes, offset + 1, length - 1);
-                    }
-                    case DOUBLE: {
-                        return doubleHash.hash(bytes, offset + 1, length - 1);
-                    }
-                    case STRING: {
-                        return stringHash.hash(bytes, offset + 1, length - 1);
-                    }
-                    case NULL: {
-                        return 0;
-                    }
-                    default: {
-                        return genericBinaryHash.hash(bytes, offset + 1, length - 1);
-                    }
-                }
+                return genericBinaryHash.hash(bytes, offset, length);
             }
         };
     }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFactoryProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFactoryProvider.java
index 33698bd..76dd07d 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFactoryProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFactoryProvider.java
@@ -2,11 +2,6 @@
 
 import java.io.Serializable;
 
-import edu.uci.ics.asterix.dataflow.data.nontagged.hash.AObjectBinaryHashFunctionFactory;
-import edu.uci.ics.asterix.dataflow.data.nontagged.hash.BooleanBinaryHashFunctionFactory;
-import edu.uci.ics.asterix.dataflow.data.nontagged.hash.DoubleBinaryHashFunctionFactory;
-import edu.uci.ics.asterix.dataflow.data.nontagged.hash.LongBinaryHashFunctionFactory;
-import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
@@ -16,7 +11,6 @@
 import edu.uci.ics.hyracks.data.std.primitive.FloatPointable;
 import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
 import edu.uci.ics.hyracks.data.std.primitive.RawUTF8StringPointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
 
 public class AqlBinaryHashFunctionFactoryProvider implements IBinaryHashFunctionFactoryProvider, Serializable {
 
@@ -39,81 +33,6 @@
 
     @Override
     public IBinaryHashFunctionFactory getBinaryHashFunctionFactory(Object type) {
-        if (type == null) {
-            return AObjectBinaryHashFunctionFactory.INSTANCE;
-        }
-        IAType aqlType = (IAType) type;
-        switch (aqlType.getTypeTag()) {
-            case ANY:
-            case UNION: { // we could do smth better for nullable fields
-                return AObjectBinaryHashFunctionFactory.INSTANCE;
-            }
-            case NULL: {
-                return new IBinaryHashFunctionFactory() {
-
-                    private static final long serialVersionUID = 1L;
-
-                    @Override
-                    public IBinaryHashFunction createBinaryHashFunction() {
-                        return new IBinaryHashFunction() {
-
-                            @Override
-                            public int hash(byte[] bytes, int offset, int length) {
-                                return 0;
-                            }
-                        };
-                    }
-                };
-            }
-            case BOOLEAN: {
-                return addOffset(BooleanBinaryHashFunctionFactory.INSTANCE);
-            }
-            case DATE:
-            case TIME:
-            case YEARMONTHDURATION:
-            case INT32: {
-                return addOffset(new PointableBinaryHashFunctionFactory(IntegerPointable.FACTORY));
-            }
-            case DAYTIMEDURATION:
-            case DATETIME:
-            case INT64: {
-                return addOffset(LongBinaryHashFunctionFactory.INSTANCE);
-            }
-            case FLOAT: {
-                return addOffset(new PointableBinaryHashFunctionFactory(FloatPointable.FACTORY));
-            }
-            case DOUBLE: {
-                return addOffset(DoubleBinaryHashFunctionFactory.INSTANCE);
-            }
-            case STRING: {
-                return addOffset(new PointableBinaryHashFunctionFactory(UTF8StringPointable.FACTORY));
-            }
-            default: {
-                return addOffsetForGenericBinaryHash();
-            }
-        }
-    }
-
-    private IBinaryHashFunctionFactory addOffset(final IBinaryHashFunctionFactory inst) {
-        return new IBinaryHashFunctionFactory() {
-
-            private static final long serialVersionUID = 1L;
-
-            @Override
-            public IBinaryHashFunction createBinaryHashFunction() {
-                final IBinaryHashFunction bhf = inst.createBinaryHashFunction();
-                return new IBinaryHashFunction() {
-
-                    @Override
-                    public int hash(byte[] bytes, int offset, int length) {
-                        return bhf.hash(bytes, offset + 1, length);
-                    }
-                };
-            }
-        };
-    }
-
-    private IBinaryHashFunctionFactory addOffsetForGenericBinaryHash() {
         return new IBinaryHashFunctionFactory() {
 
             private static final long serialVersionUID = 1L;