Merge branch 'raman/asterix_lsm_stabilization_dependency'
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
index d3b1795..911f6fd 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
@@ -384,11 +384,7 @@
         Dataset dataset = null;
         try {
             DatasetDecl dd = (DatasetDecl) stmt;
-            dataverseName = dd.getDataverse() != null ? dd.getDataverse().getValue()
-                    : activeDefaultDataverse != null ? activeDefaultDataverse.getDataverseName() : null;
-            if (dataverseName == null) {
-                throw new AlgebricksException(" dataverse not specified ");
-            }
+            dataverseName = getActiveDataverseName(dd.getDataverse());
             datasetName = dd.getName().getValue();
 
             DatasetType dsType = dd.getDatasetType();
@@ -553,11 +549,7 @@
         JobSpecification spec = null;
         try {
             CreateIndexStatement stmtCreateIndex = (CreateIndexStatement) stmt;
-            dataverseName = stmtCreateIndex.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : stmtCreateIndex.getDataverseName().getValue();
-            if (dataverseName == null) {
-                throw new AlgebricksException(" dataverse not specified ");
-            }
+            dataverseName = getActiveDataverseName(stmtCreateIndex.getDataverseName());
             datasetName = stmtCreateIndex.getDatasetName().getValue();
 
             Dataset ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
@@ -686,15 +678,11 @@
 
         try {
             TypeDecl stmtCreateType = (TypeDecl) stmt;
-            String dataverseName = stmtCreateType.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : stmtCreateType.getDataverseName().getValue();
-            if (dataverseName == null) {
-                throw new AlgebricksException(" dataverse not specified ");
-            }
+            String dataverseName = getActiveDataverseName(stmtCreateType.getDataverseName());
             String typeName = stmtCreateType.getIdent().getValue();
             Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
             if (dv == null) {
-                throw new AlgebricksException("Unknonw dataverse " + dataverseName);
+                throw new AlgebricksException("Unknown dataverse " + dataverseName);
             }
             Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, typeName);
             if (dt != null) {
@@ -849,11 +837,7 @@
         List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
         try {
             DropStatement stmtDelete = (DropStatement) stmt;
-            dataverseName = stmtDelete.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : stmtDelete.getDataverseName().getValue();
-            if (dataverseName == null) {
-                throw new AlgebricksException(" dataverse not specified ");
-            }
+            dataverseName = getActiveDataverseName(stmtDelete.getDataverseName());
             datasetName = stmtDelete.getDatasetName().getValue();
 
             Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
@@ -960,11 +944,7 @@
         try {
             IndexDropStatement stmtIndexDrop = (IndexDropStatement) stmt;
             datasetName = stmtIndexDrop.getDatasetName().getValue();
-            dataverseName = stmtIndexDrop.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : stmtIndexDrop.getDataverseName().getValue();
-            if (dataverseName == null) {
-                throw new AlgebricksException(" dataverse not specified ");
-            }
+            dataverseName = getActiveDataverseName(stmtIndexDrop.getDataverseName());
 
             Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
             if (ds == null) {
@@ -1062,11 +1042,7 @@
 
         try {
             TypeDropStatement stmtTypeDrop = (TypeDropStatement) stmt;
-            String dataverseName = stmtTypeDrop.getDataverseName() == null ? (activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName()) : stmtTypeDrop.getDataverseName().getValue();
-            if (dataverseName == null) {
-                throw new AlgebricksException(" dataverse not specified ");
-            }
+            String dataverseName = getActiveDataverseName(stmtTypeDrop.getDataverseName());
             String typeName = stmtTypeDrop.getTypeName().getValue();
             Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, typeName);
             if (dt == null) {
@@ -1117,11 +1093,7 @@
 
         try {
             CreateFunctionStatement cfs = (CreateFunctionStatement) stmt;
-            String dataverse = cfs.getSignature().getNamespace() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : cfs.getSignature().getNamespace();
-            if (dataverse == null) {
-                throw new AlgebricksException(" dataverse not specified ");
-            }
+            String dataverse = getActiveDataverseName(cfs.getSignature().getNamespace());
             Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
             if (dv == null) {
                 throw new AlgebricksException("There is no dataverse with this name " + dataverse + ".");
@@ -1174,8 +1146,7 @@
         List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
         try {
             LoadFromFileStatement loadStmt = (LoadFromFileStatement) stmt;
-            String dataverseName = loadStmt.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : loadStmt.getDataverseName().getValue();
+            String dataverseName = getActiveDataverseName(loadStmt.getDataverseName());
             CompiledLoadFromFileStatement cls = new CompiledLoadFromFileStatement(dataverseName, loadStmt
                     .getDatasetName().getValue(), loadStmt.getAdapter(), loadStmt.getProperties(),
                     loadStmt.dataIsAlreadySorted());
@@ -1223,8 +1194,7 @@
         try {
             metadataProvider.setWriteTransaction(true);
             WriteFromQueryResultStatement st1 = (WriteFromQueryResultStatement) stmt;
-            String dataverseName = st1.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : st1.getDataverseName().getValue();
+            String dataverseName = getActiveDataverseName(st1.getDataverseName());
             CompiledWriteFromQueryResultStatement clfrqs = new CompiledWriteFromQueryResultStatement(dataverseName, st1
                     .getDatasetName().getValue(), st1.getQuery(), st1.getVarCounter());
 
@@ -1255,8 +1225,7 @@
         try {
             metadataProvider.setWriteTransaction(true);
             InsertStatement stmtInsert = (InsertStatement) stmt;
-            String dataverseName = stmtInsert.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : stmtInsert.getDataverseName().getValue();
+            String dataverseName = getActiveDataverseName(stmtInsert.getDataverseName());
             CompiledInsertStatement clfrqs = new CompiledInsertStatement(dataverseName, stmtInsert.getDatasetName()
                     .getValue(), stmtInsert.getQuery(), stmtInsert.getVarCounter());
             JobSpecification compiled = rewriteCompileQuery(metadataProvider, clfrqs.getQuery(), clfrqs);
@@ -1289,8 +1258,7 @@
         try {
             metadataProvider.setWriteTransaction(true);
             DeleteStatement stmtDelete = (DeleteStatement) stmt;
-            String dataverseName = stmtDelete.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : stmtDelete.getDataverseName().getValue();
+            String dataverseName = getActiveDataverseName(stmtDelete.getDataverseName());
             CompiledDeleteStatement clfrqs = new CompiledDeleteStatement(stmtDelete.getVariableExpr(), dataverseName,
                     stmtDelete.getDatasetName().getValue(), stmtDelete.getCondition(), stmtDelete.getVarCounter(),
                     metadataProvider);
@@ -1340,8 +1308,7 @@
 
         try {
             BeginFeedStatement bfs = (BeginFeedStatement) stmt;
-            String dataverseName = bfs.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : bfs.getDataverseName().getValue();
+            String dataverseName = getActiveDataverseName(bfs.getDataverseName());
 
             CompiledBeginFeedStatement cbfs = new CompiledBeginFeedStatement(dataverseName, bfs.getDatasetName()
                     .getValue(), bfs.getQuery(), bfs.getVarCounter());
@@ -1389,8 +1356,7 @@
 
         try {
             ControlFeedStatement cfs = (ControlFeedStatement) stmt;
-            String dataverseName = cfs.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                    : activeDefaultDataverse.getDataverseName() : cfs.getDatasetName().getValue();
+            String dataverseName = getActiveDataverseName(cfs.getDataverseName());
             CompiledControlFeedStatement clcfs = new CompiledControlFeedStatement(cfs.getOperationType(),
                     dataverseName, cfs.getDatasetName().getValue(), cfs.getAlterAdapterConfParams());
             JobSpecification jobSpec = FeedOperations.buildControlFeedJobSpec(clcfs, metadataProvider);
@@ -1536,6 +1502,23 @@
         return format;
     }
 
+    private String getActiveDataverseName(String dataverse)
+            throws AlgebricksException {
+        if (dataverse != null) {
+            return dataverse;
+        }
+        if (activeDefaultDataverse != null) {
+            return activeDefaultDataverse.getDataverseName();
+        }
+        throw new AlgebricksException("dataverse not specified");
+    }
+
+    private String getActiveDataverseName(Identifier dataverse)
+            throws AlgebricksException {
+        return getActiveDataverseName(
+                dataverse != null ? dataverse.getValue() : null);
+    }
+
     private void acquireWriteLatch() {
         MetadataManager.INSTANCE.acquireWriteLatch();
     }
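
Every statement handler touched above previously repeated the same nested ternary to resolve its target dataverse; the two overloads added at the end of this hunk centralize that fallback (explicit name if given, else the session's active default dataverse, else an error). The following self-contained sketch restates those semantics outside of AsterixDB, using simplified stand-in types rather than the real Dataverse and AlgebricksException classes; it is an illustration of the helper's behavior, not code from the patch.

// Standalone illustration of the dataverse-name fallback that the patch
// factors into AqlTranslator.getActiveDataverseName(...). The nested types
// below are simplified stand-ins, not the real AsterixDB classes.
public class DataverseFallbackSketch {

    // Stand-in for edu.uci.ics.asterix.metadata.entities.Dataverse
    static final class Dataverse {
        private final String name;
        Dataverse(String name) { this.name = name; }
        String getDataverseName() { return name; }
    }

    // Stand-in for AlgebricksException
    static final class AlgebricksException extends Exception {
        AlgebricksException(String msg) { super(msg); }
    }

    private final Dataverse activeDefaultDataverse;

    DataverseFallbackSketch(Dataverse activeDefaultDataverse) {
        this.activeDefaultDataverse = activeDefaultDataverse;
    }

    // Same precedence as the new helper: an explicit name wins, then the
    // session's default dataverse, otherwise the statement is rejected.
    String getActiveDataverseName(String dataverse) throws AlgebricksException {
        if (dataverse != null) {
            return dataverse;
        }
        if (activeDefaultDataverse != null) {
            return activeDefaultDataverse.getDataverseName();
        }
        throw new AlgebricksException("dataverse not specified");
    }

    public static void main(String[] args) throws Exception {
        DataverseFallbackSketch withDefault = new DataverseFallbackSketch(new Dataverse("tpch"));
        System.out.println(withDefault.getActiveDataverseName("test")); // explicit name -> "test"
        System.out.println(withDefault.getActiveDataverseName(null));   // falls back    -> "tpch"

        DataverseFallbackSketch noDefault = new DataverseFallbackSketch(null);
        try {
            noDefault.getActiveDataverseName(null);                     // neither set   -> error
        } catch (AlgebricksException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}
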
diff --git a/asterix-app/src/test/resources/AQLTS/queries/3.aql b/asterix-app/src/test/resources/AQLTS/queries/3.aql
deleted file mode 100644
index 3d71b27..0000000
--- a/asterix-app/src/test/resources/AQLTS/queries/3.aql
+++ /dev/null
@@ -1,5 +0,0 @@
-for $current_sig in dataset('SIGroup')
-where
-   every $old_sig in dataset('SIGroup', getCurrentDateTime() - dtduration(0, 24, 0, 0))
-   satisfies $old_sig.name != $current_sig.name
-return $current_sig
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_0/issue_363_temporal_sec_key_0.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_0/issue_363_temporal_sec_key_0.1.ddl.aql
new file mode 100644
index 0000000..73d63c9
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_0/issue_363_temporal_sec_key_0.1.ddl.aql
@@ -0,0 +1,19 @@
+/*
+ * Description  : create a dataset using year-month-duration for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type Emp as open {
+id:year-month-duration,
+dur:year-month-duration,
+name:string
+}
+
+create dataset Employee(Emp) primary key id;
+create index TestSecondIndex on Employee(dur);
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_0/issue_363_temporal_sec_key_0.2.update.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_0/issue_363_temporal_sec_key_0.2.update.aql
new file mode 100644
index 0000000..bd96595
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_0/issue_363_temporal_sec_key_0.2.update.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : create a dataset using year-month-duration for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+use dataverse test;
+
+insert into dataset Employee({"id":year-month-duration("P16Y"), "dur":year-month-duration("-P23Y"), "name": "John"})
+insert into dataset Employee({"id":year-month-duration("-P37M"), "dur":year-month-duration("P1Y48M"), "name": "Alex"})
+insert into dataset Employee({"id":year-month-duration("P2013Y"), "dur":year-month-duration("P7M"), "name": "Bob"})
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_0/issue_363_temporal_sec_key_0.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_0/issue_363_temporal_sec_key_0.3.query.aql
new file mode 100644
index 0000000..8244c6d
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_0/issue_363_temporal_sec_key_0.3.query.aql
@@ -0,0 +1,13 @@
+/*
+ * Description  : create a dataset using year-month-duration for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+use dataverse test;
+
+for $x in dataset('Employee')
+where $x.dur > year-month-duration("P1Y")
+return $x
+
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_1/issue_363_temporal_sec_key_1.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_1/issue_363_temporal_sec_key_1.1.ddl.aql
new file mode 100644
index 0000000..9697c32
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_1/issue_363_temporal_sec_key_1.1.ddl.aql
@@ -0,0 +1,19 @@
+/*
+ * Description  : create a dataset using datetime for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type Emp as open {
+id:datetime,
+dt:datetime,
+name:string
+}
+
+create dataset Employee(Emp) primary key id;
+create index TestSecondIndex on Employee(dt);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_1/issue_363_temporal_sec_key_1.2.update.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_1/issue_363_temporal_sec_key_1.2.update.aql
new file mode 100644
index 0000000..d66da92
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_1/issue_363_temporal_sec_key_1.2.update.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : create a dataset using datetime for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+use dataverse test;
+
+insert into dataset Employee({"id":datetime("1900-01-01T00:00:00"), "dt":datetime("1900-01-01T00:00:00"), "name": "John"})
+insert into dataset Employee({"id":datetime("2000-01-01T00:00:00"), "dt":datetime("2000-01-01T00:00:00"), "name": "Alex"})
+insert into dataset Employee({"id":datetime("2013-01-01T00:00:00"), "dt":datetime("2013-01-01T00:00:00"), "name": "Bob"})
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_1/issue_363_temporal_sec_key_1.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_1/issue_363_temporal_sec_key_1.3.query.aql
new file mode 100644
index 0000000..eb92aea
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_1/issue_363_temporal_sec_key_1.3.query.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : create a dataset using datetime for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+use dataverse test;
+
+for $x in dataset('Employee')
+where $x.dt > datetime("2007-07-07T07:07:07.777Z")
+return $x
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_2/issue_363_temporal_sec_key_2.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_2/issue_363_temporal_sec_key_2.1.ddl.aql
new file mode 100644
index 0000000..d3c19f4
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_2/issue_363_temporal_sec_key_2.1.ddl.aql
@@ -0,0 +1,19 @@
+/*
+ * Description  : create a dataset using time for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type Emp as open {
+id:time,
+tm:time,
+name:string
+}
+
+create dataset Employee(Emp) primary key id;
+create index TestSecondIndex on Employee(tm);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_2/issue_363_temporal_sec_key_2.2.update.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_2/issue_363_temporal_sec_key_2.2.update.aql
new file mode 100644
index 0000000..6f44d6c
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_2/issue_363_temporal_sec_key_2.2.update.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : create a dataset using time for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+use dataverse test;
+
+insert into dataset Employee({"id":time("03:10:00.493Z"), "tm":time("03:10:00.493Z"), "name": "John"})
+insert into dataset Employee({"id":time("20:37:19+08:00"), "tm":time("20:37:19+08:00"), "name": "Alex"})
+insert into dataset Employee({"id":time("21:39:17.948-04:00"), "tm":time("21:39:17.948-04:00"), "name": "Bob"})
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_2/issue_363_temporal_sec_key_2.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_2/issue_363_temporal_sec_key_2.3.query.aql
new file mode 100644
index 0000000..f0392b1
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_2/issue_363_temporal_sec_key_2.3.query.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : create a dataset using time for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+use dataverse test;
+
+for $x in dataset('Employee')
+where $x.tm > time("07:07:07.777Z")
+return $x
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_3/issue_363_temporal_sec_key_3.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_3/issue_363_temporal_sec_key_3.1.ddl.aql
new file mode 100644
index 0000000..ab88978
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_3/issue_363_temporal_sec_key_3.1.ddl.aql
@@ -0,0 +1,19 @@
+/*
+ * Description  : create a dataset using date for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type Emp as open {
+id:date,
+dt:date,
+name:string
+}
+
+create dataset Employee(Emp) primary key id;
+create index TestSecondIndex on Employee(dt);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_3/issue_363_temporal_sec_key_3.2.update.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_3/issue_363_temporal_sec_key_3.2.update.aql
new file mode 100644
index 0000000..b8d415e
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_3/issue_363_temporal_sec_key_3.2.update.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : create a dataset using date for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+use dataverse test;
+
+insert into dataset Employee({"id":date("2010-01-01"), "dt":date("2010-01-01"), "name": "John"})
+insert into dataset Employee({"id":date("-1912-10-11"), "dt":date("-1912-10-11"), "name": "Alex"})
+insert into dataset Employee({"id":date("0732-02-02"), "dt":date("0732-02-02"), "name": "Bob"})
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_3/issue_363_temporal_sec_key_3.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_3/issue_363_temporal_sec_key_3.3.query.aql
new file mode 100644
index 0000000..128a2a6
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_3/issue_363_temporal_sec_key_3.3.query.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : create a dataset using date for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+use dataverse test;
+
+for $x in dataset('Employee')
+where $x.dt > date("2007-07-07")
+return $x
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_4/issue_363_temporal_sec_key_4.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_4/issue_363_temporal_sec_key_4.1.ddl.aql
new file mode 100644
index 0000000..c6a8836
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_4/issue_363_temporal_sec_key_4.1.ddl.aql
@@ -0,0 +1,19 @@
+/*
+ * Description  : create a dataset using day-time-duration for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type Emp as open {
+id:day-time-duration,
+dur:day-time-duration,
+name:string
+}
+
+create dataset Employee(Emp) primary key id;
+create index TestSecondIndex on Employee(dur);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_4/issue_363_temporal_sec_key_4.2.update.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_4/issue_363_temporal_sec_key_4.2.update.aql
new file mode 100644
index 0000000..d5aafe9
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_4/issue_363_temporal_sec_key_4.2.update.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : create a dataset using day-time-duration for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+use dataverse test;
+
+insert into dataset Employee({"id":day-time-duration("P380DT983M"), "dur":day-time-duration("P380DT983M"), "name": "John"})
+insert into dataset Employee({"id":day-time-duration("-P3829H849.392S"), "dur":day-time-duration("-P3829H849.392S"), "name": "Alex"})
+insert into dataset Employee({"id":day-time-duration("PT93847M0.392S"), "dur":day-time-duration("PT93847M0.392S"), "name": "Bob"})
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_4/issue_363_temporal_sec_key_4.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_4/issue_363_temporal_sec_key_4.3.query.aql
new file mode 100644
index 0000000..dbca351
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_363_temporal_sec_key_4/issue_363_temporal_sec_key_4.3.query.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : create a dataset using day-time-duration for the secondary index 
+ * Expected Res : Success
+ * Date         : 26 May 2013
+ * Issue        : 461
+ */
+
+use dataverse test;
+
+for $x in dataset('Employee')
+where $x.dur > day-time-duration("P350D")
+return $x
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_0/issue363_temporal_sec_key_0.1.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_0/issue363_temporal_sec_key_0.1.adm
new file mode 100644
index 0000000..f1a3b14
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_0/issue363_temporal_sec_key_0.1.adm
@@ -0,0 +1 @@
+{ "id": year-month-duration("-P3Y1M"), "dur": year-month-duration("P5Y"), "name": "Alex" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_1/issue363_temporal_sec_key_1.1.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_1/issue363_temporal_sec_key_1.1.adm
new file mode 100644
index 0000000..1703ccd
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_1/issue363_temporal_sec_key_1.1.adm
@@ -0,0 +1 @@
+{ "id": datetime("2013-01-01T00:00:00.000Z"), "dt": datetime("2013-01-01T00:00:00.000Z"), "name": "Bob" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_2/issue363_temporal_sec_key_2.1.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_2/issue363_temporal_sec_key_2.1.adm
new file mode 100644
index 0000000..1e32e45
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_2/issue363_temporal_sec_key_2.1.adm
@@ -0,0 +1,2 @@
+{ "id": time("12:37:19.000Z"), "tm": time("12:37:19.000Z"), "name": "Alex" }
+{ "id": time("01:39:17.948Z"), "tm": time("01:39:17.948Z"), "name": "Bob" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_3/issue363_temporal_sec_key_3.1.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_3/issue363_temporal_sec_key_3.1.adm
new file mode 100644
index 0000000..fb4a286
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_3/issue363_temporal_sec_key_3.1.adm
@@ -0,0 +1 @@
+{ "id": date("2010-01-01"), "dt": date("2010-01-01"), "name": "John" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_4/issue363_temporal_sec_key_4.1.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_4/issue363_temporal_sec_key_4.1.adm
new file mode 100644
index 0000000..dacaf15
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_363_temporal_sec_key_4/issue363_temporal_sec_key_4.1.adm
@@ -0,0 +1 @@
+{ "id": day-time-duration("P380DT16H23M"), "dur": day-time-duration("P380DT16H23M"), "name": "John" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/testsuite.xml b/asterix-app/src/test/resources/metadata/testsuite.xml
index 63487d4..e9ed34d 100644
--- a/asterix-app/src/test/resources/metadata/testsuite.xml
+++ b/asterix-app/src/test/resources/metadata/testsuite.xml
@@ -175,6 +175,31 @@
         <output-dir compare="Text">issue_363_temporal_key_4</output-dir>
       </compilation-unit>
     </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="issue_363_temporal_sec_key_0">
+        <output-dir compare="Text">issue_363_temporal_sec_key_0</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="issue_363_temporal_sec_key_1">
+        <output-dir compare="Text">issue_363_temporal_sec_key_1</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="issue_363_temporal_sec_key_2">
+        <output-dir compare="Text">issue_363_temporal_sec_key_2</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="issue_363_temporal_sec_key_3">
+        <output-dir compare="Text">issue_363_temporal_sec_key_3</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="issue_363_temporal_sec_key_4">
+        <output-dir compare="Text">issue_363_temporal_sec_key_4</output-dir>
+      </compilation-unit>
+    </test-case>
   </test-group>
   <test-group name="exception">
     <test-case FilePath="exception">
diff --git a/asterix-app/src/test/resources/optimizerts/results/fj-phase1.plan b/asterix-app/src/test/resources/optimizerts/results/fj-phase1.plan
index 494b208..76fd0bd 100644
--- a/asterix-app/src/test/resources/optimizerts/results/fj-phase1.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/fj-phase1.plan
@@ -11,11 +11,11 @@
                           -- NESTED_TUPLE_SOURCE  |LOCAL|
                     }
               -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                -- STABLE_SORT [$$23(ASC), $$6(ASC)]  |PARTITIONED|
+                -- STABLE_SORT [$$23(ASC), $$4(ASC)]  |PARTITIONED|
                   -- HASH_PARTITION_EXCHANGE [$$23]  |PARTITIONED|
                     -- STREAM_PROJECT  |PARTITIONED|
                       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                        -- HYBRID_HASH_JOIN [$$1][$$6]  |PARTITIONED|
+                        -- HYBRID_HASH_JOIN [$$1][$$4]  |PARTITIONED|
                           -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
                             -- STREAM_PROJECT  |PARTITIONED|
                               -- UNNEST  |PARTITIONED|
@@ -25,7 +25,7 @@
                                       -- DATASOURCE_SCAN  |PARTITIONED|
                                         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                           -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                          -- HASH_PARTITION_EXCHANGE [$$6]  |PARTITIONED|
+                          -- HASH_PARTITION_EXCHANGE [$$4]  |PARTITIONED|
                             -- RUNNING_AGGREGATE  |PARTITIONED|
                               -- STREAM_PROJECT  |PARTITIONED|
                                 -- SORT_MERGE_EXCHANGE [$$24(DESC) ]  |PARTITIONED|
@@ -37,13 +37,13 @@
                                                   -- NESTED_TUPLE_SOURCE  |LOCAL|
                                               }
                                         -- HASH_PARTITION_MERGE_EXCHANGE MERGE:[$$30(ASC)] HASH:[$$30]  |PARTITIONED|
-                                          -- PRE_CLUSTERED_GROUP_BY[$$5]  |PARTITIONED|
+                                          -- PRE_CLUSTERED_GROUP_BY[$$3]  |PARTITIONED|
                                                   {
                                                     -- AGGREGATE  |LOCAL|
                                                       -- NESTED_TUPLE_SOURCE  |LOCAL|
                                                   }
                                             -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                              -- STABLE_SORT [$$5(ASC)]  |PARTITIONED|
+                                              -- STABLE_SORT [$$3(ASC)]  |PARTITIONED|
                                                 -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                   -- STREAM_PROJECT  |PARTITIONED|
                                                     -- UNNEST  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inlined_q18_large_volume_customer.plan b/asterix-app/src/test/resources/optimizerts/results/inlined_q18_large_volume_customer.plan
index 0713f96..0c7b95d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inlined_q18_large_volume_customer.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inlined_q18_large_volume_customer.plan
@@ -28,7 +28,7 @@
                                       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                         -- STREAM_PROJECT  |PARTITIONED|
                                           -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                            -- HYBRID_HASH_JOIN [$$57][$$4]  |PARTITIONED|
+                                            -- HYBRID_HASH_JOIN [$$57][$$3]  |PARTITIONED|
                                               -- HASH_PARTITION_EXCHANGE [$$57]  |PARTITIONED|
                                                 -- STREAM_PROJECT  |PARTITIONED|
                                                   -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
@@ -47,7 +47,7 @@
                                                               -- DATASOURCE_SCAN  |PARTITIONED|
                                                                 -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                   -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                                              -- HASH_PARTITION_EXCHANGE [$$4]  |PARTITIONED|
+                                              -- HASH_PARTITION_EXCHANGE [$$3]  |PARTITIONED|
                                                 -- STREAM_PROJECT  |PARTITIONED|
                                                   -- STREAM_SELECT  |PARTITIONED|
                                                     -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue377/query-issue377.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue377/query-issue377.3.query.aql
index 81d6cf6..02ff97d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue377/query-issue377.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue377/query-issue377.3.query.aql
@@ -11,6 +11,7 @@
 set simthreshold "3";
 
 for $fbu in dataset FacebookUsers
+order by $fbu.id
 return {
     "id": $fbu.id,
     "name": $fbu.name,
diff --git a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue377/query-issue377.1.adm b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue377/query-issue377.1.adm
index 3944bcc..1a7caf5 100644
--- a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue377/query-issue377.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue377/query-issue377.1.adm
@@ -1,10 +1,14 @@
 { "id": 9142198, "similar-users": [  ], "name": "SherryFea" }
 { "id": 9313492, "similar-users": [  ], "name": "TeraWolfe" }
 { "id": 9478720, "similar-users": [  ], "name": "AngeliaKettlewell" }
+{ "id": 9510451, "similar-users": [  ], "name": "ChuckFinck" }
 { "id": 9594523, "similar-users": [  ], "name": "TamWillcox" }
 { "id": 9629395, "similar-users": [  ], "name": "JuliusWire" }
 { "id": 9988417, "similar-users": [  ], "name": "ColineLane" }
+{ "id": 10001080, "similar-users": [  ], "name": "GarrettBode" }
 { "id": 10179538, "similar-users": [  ], "name": "OrlandoBaxter" }
+{ "id": 10272571, "similar-users": [  ], "name": "JarrettGoldvogel" }
+{ "id": 10307032, "similar-users": [  ], "name": "QuentinSauter" }
 { "id": 10361965, "similar-users": [  ], "name": "ArlenFlick" }
 { "id": 10394488, "similar-users": [  ], "name": "OswaldRay" }
 { "id": 10423588, "similar-users": [  ], "name": "ShirleneRuch" }
@@ -13,17 +17,13 @@
 { "id": 10733617, "similar-users": [  ], "name": "LeonardoKight" }
 { "id": 10874791, "similar-users": [  ], "name": "HaydeeGarratt" }
 { "id": 10957867, "similar-users": [  ], "name": "ZachOppenheimer" }
+{ "id": 11061631, "similar-users": [  ], "name": "MaxeneKellogg" }
 { "id": 11068231, "similar-users": [  ], "name": "DinahSwink" }
+{ "id": 11140213, "similar-users": [  ], "name": "MontgomeryWhittier" }
 { "id": 11307946, "similar-users": [  ], "name": "HelgaStough" }
 { "id": 11381089, "similar-users": [  ], "name": "EarleneAmmons" }
-{ "id": 11951098, "similar-users": [  ], "name": "TeraByers" }
-{ "id": 11954992, "similar-users": [  ], "name": "CaitlinLangston" }
-{ "id": 9510451, "similar-users": [  ], "name": "ChuckFinck" }
-{ "id": 10001080, "similar-users": [  ], "name": "GarrettBode" }
-{ "id": 10272571, "similar-users": [  ], "name": "JarrettGoldvogel" }
-{ "id": 10307032, "similar-users": [  ], "name": "QuentinSauter" }
-{ "id": 11061631, "similar-users": [  ], "name": "MaxeneKellogg" }
-{ "id": 11140213, "similar-users": [  ], "name": "MontgomeryWhittier" }
 { "id": 11447332, "similar-users": [  ], "name": "SherisseMaugham" }
 { "id": 11570326, "similar-users": [  ], "name": "LindenFilby" }
 { "id": 11675221, "similar-users": [  ], "name": "CalantheGearhart" }
+{ "id": 11951098, "similar-users": [  ], "name": "TeraByers" }
+{ "id": 11954992, "similar-users": [  ], "name": "CaitlinLangston" }
diff --git a/asterix-aql/src/main/javacc/AQL.jj b/asterix-aql/src/main/javacc/AQL.jj
index 2a5f534..a19a15f 100644
--- a/asterix-aql/src/main/javacc/AQL.jj
+++ b/asterix-aql/src/main/javacc/AQL.jj
@@ -1713,49 +1713,34 @@
 
 Expression DatasetAccessExpression() throws ParseException:
 {
-  CallExpr callExpr;
-  List<Expression> argList = new ArrayList<Expression>();
   String funcName;
-  String dataverse;
   String arg1 = null;
   String arg2 = null;
-  LiteralExpr ds;
   Expression nameArg;
-  int arity = 0;
 }
 {  
   <DATASET>
     {
-      dataverse = MetadataConstants.METADATA_DATAVERSE_NAME;
       funcName = token.image;
     }
   ( ( arg1 = Identifier() ( "." arg2 = Identifier() )? ) 
     {
       String name = arg2 == null ? arg1 : arg1 + "." + arg2;
-      ds = new LiteralExpr();
+      LiteralExpr ds = new LiteralExpr();
       ds.setValue( new StringLiteral(name) );
-      argList.add(ds);
-      arity ++;
+      nameArg = ds;
     }
-  | ( <LEFTPAREN> nameArg = Expression()
+  | ( <LEFTPAREN> nameArg = Expression() <RIGHTPAREN> ) )  
     {
+      String dataverse = MetadataConstants.METADATA_DATAVERSE_NAME;
+      FunctionSignature signature = lookupFunctionSignature(dataverse, funcName, 1);
+      if (signature == null) {
+        signature = new FunctionSignature(dataverse, funcName, 1);
+      }
+      List<Expression> argList = new ArrayList<Expression>();
       argList.add(nameArg);
-      arity ++;
+      return new CallExpr(signature, argList);
     }
-  ( "," nameArg = Expression()
-    {
-      argList.add(nameArg);
-      arity++;
-    }
-  )* <RIGHTPAREN> ) )  
-   {
-     FunctionSignature signature = lookupFunctionSignature(dataverse, funcName, arity);
-     if (signature == null) {
-       signature = new FunctionSignature(dataverse, funcName, arity);
-     }
-     callExpr = new CallExpr(signature,argList);
-     return callExpr;
-   }
 }
 
 Expression ParenthesizedExpression() throws ParseException:
@@ -1835,21 +1820,14 @@
 	extendCurrentScope();
 }
 {
-    "for" varExp = Variable()
-    {
-     	getCurrentScope().addNewVarSymbolToScope(varExp.getVar());
-	}
-	("at" varPos = Variable()
-	  {
-	     getCurrentScope().addNewVarSymbolToScope(varPos.getVar());
-	  } 
-	 )? 
-      "in" ( inExp = Expression() )
+    "for" varExp = Variable() ("at" varPos = Variable())?  "in" ( inExp = Expression() )
     {
       fc.setVarExpr(varExp);
+      getCurrentScope().addNewVarSymbolToScope(varExp.getVar());
       fc.setInExpr(inExp);
       if (varPos != null) {
         fc.setPosExpr(varPos);
+        getCurrentScope().addNewVarSymbolToScope(varPos.getVar());
       }
       return fc;
     }
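
The AQL.jj hunk above rewrites DatasetAccessExpression so that every dataset access compiles to a one-argument dataset() call; the previous production collected a comma-separated argument list of arbitrary arity, which is the form the deleted AQLTS query 3.aql used (dataset('SIGroup', <datetime expression>)) and presumably why that query is removed in the same commit. A short AQL illustration of the spellings the revised production still accepts follows; the dataverse and dataset names are placeholders, not taken from the patch.

use dataverse test;

/* All three spellings below compile to the same one-argument dataset() call
 * under the revised production:
 *   dataset Employee          (bare identifier)
 *   dataset test.Employee     (dataverse-qualified identifier)
 *   dataset('Employee')       (parenthesized expression)
 * The multi-argument form dataset('SIGroup', <expr>) no longer parses.
 */
for $x in dataset test.Employee
where $x.name = "John"
return $x
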
diff --git a/asterix-doc/src/site/markdown/AccessingExternalDataInAsterixDB.md b/asterix-doc/src/site/markdown/AccessingExternalDataInAsterixDB.md
index 7e49a0f..7319094 100644
--- a/asterix-doc/src/site/markdown/AccessingExternalDataInAsterixDB.md
+++ b/asterix-doc/src/site/markdown/AccessingExternalDataInAsterixDB.md
@@ -14,7 +14,7 @@
 _For constructing an example, we assume a single machine setup._
 
 Similar to a regular dataset, an external dataset has an associated datatype.  We shall first create the datatype associated with each record in Lineitem data.
-Paste the following in the query textbox on the webpage at http://127.0.0.1 and hit 'Execute'.
+Paste the following in the query textbox on the webpage at http://127.0.0.1:19001 and hit 'Execute'.
 
 
         create dataverse ExternalFileDemo;
@@ -100,7 +100,7 @@
         127.0.0.1:///home/joe/lineitem.tbl.
 
 
-In your web-browser, navigate to 127.0.0.1 and paste the above to the query text box. Finally hit 'Execute'.
+In your web-browser, navigate to 127.0.0.1:19001 and paste the above to the query text box. Finally hit 'Execute'.
 
 Next we move over to the section [Writing Queries against an External Dataset](#Writing_Queries_against_an_External_Dataset) and try a sample query against the external dataset.
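
The documentation fix above only corrects the web-interface port (19001). For readers without the full page at hand, the DDL it asks you to paste has roughly the shape sketched below; the field list is reconstructed from the TPC-H Lineitem schema and the adapter clause assumes the localfs alias with its path/format/delimiter parameters, so treat the names and types here as illustrative rather than quoted from the page.

drop dataverse ExternalFileDemo if exists;
create dataverse ExternalFileDemo;
use dataverse ExternalFileDemo;

/* Record type for the delimited lineitem.tbl file (TPC-H Lineitem);
   field names follow the TPC-H schema, types are the conventional choices. */
create type LineitemType as closed {
  l_orderkey: int32,
  l_partkey: int32,
  l_suppkey: int32,
  l_linenumber: int32,
  l_quantity: double,
  l_extendedprice: double,
  l_discount: double,
  l_tax: double,
  l_returnflag: string,
  l_linestatus: string,
  l_shipdate: string,
  l_commitdate: string,
  l_receiptdate: string,
  l_shipinstruct: string,
  l_shipmode: string,
  l_comment: string
}

/* External dataset backed by the local file referenced in the page. */
create external dataset Lineitem(LineitemType)
using localfs
(("path"="127.0.0.1:///home/joe/lineitem.tbl"),
 ("format"="delimited-text"),
 ("delimiter"="|"));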