Merge branch 'master' into jarodwen/features/positionvar
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java
index 5d52e7c..8756078 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java
@@ -63,6 +63,7 @@
 import edu.uci.ics.hyracks.algebricks.rewriter.rules.ComplexUnnestToProductRule;
 import edu.uci.ics.hyracks.algebricks.rewriter.rules.ConsolidateAssignsRule;
 import edu.uci.ics.hyracks.algebricks.rewriter.rules.ConsolidateSelectsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.EliminateGroupByEmptyKeyRule;
 import edu.uci.ics.hyracks.algebricks.rewriter.rules.EliminateSubplanRule;
 import edu.uci.ics.hyracks.algebricks.rewriter.rules.EnforceOrderByAfterSubplan;
 import edu.uci.ics.hyracks.algebricks.rewriter.rules.EnforceStructuralPropertiesRule;
@@ -82,6 +83,7 @@
 import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroduceGroupByForSubplanRule;
 import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroduceProjectsRule;
 import edu.uci.ics.hyracks.algebricks.rewriter.rules.IsolateHyracksOperatorsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.LeftOuterJoinToInnerJoinRule;
 import edu.uci.ics.hyracks.algebricks.rewriter.rules.PullSelectOutOfEqJoin;
 import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushAssignBelowUnionAllRule;
 import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushAssignDownThroughProductRule;
@@ -162,6 +164,8 @@
         condPushDownAndJoinInference.add(new PushProperJoinThroughProduct());
         condPushDownAndJoinInference.add(new PushGroupByThroughProduct());
         condPushDownAndJoinInference.add(new NestGroupByRule());
+        condPushDownAndJoinInference.add(new EliminateGroupByEmptyKeyRule());
+        condPushDownAndJoinInference.add(new LeftOuterJoinToInnerJoinRule());
 
         return condPushDownAndJoinInference;
     }
diff --git a/asterix-app/src/test/resources/optimizerts/queries/q08_group_by.aql b/asterix-app/src/test/resources/optimizerts/queries/q08_group_by.aql
new file mode 100644
index 0000000..96b20a4
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/q08_group_by.aql
@@ -0,0 +1,149 @@
+drop dataverse q08_group_by if exists;
+
+create dataverse q08_group_by;
+
+use dataverse q08_group_by;
+
+create type LineItemType as closed {
+  l_orderkey: int32, 
+  l_partkey: int32, 
+  l_suppkey: int32, 
+  l_linenumber: int32, 
+  l_quantity: double, 
+  l_extendedprice: double,
+  l_discount: double, 
+  l_tax: double,
+  l_returnflag: string, 
+  l_linestatus: string, 
+  l_shipdate: string,
+  l_commitdate: string, 
+  l_receiptdate: string, 
+  l_shipinstruct: string, 
+  l_shipmode: string, 
+  l_comment: string
+}
+
+create type OrderType as closed {
+  o_orderkey: int32, 
+  o_custkey: int32, 
+  o_orderstatus: string, 
+  o_totalprice: double, 
+  o_orderdate: string, 
+  o_orderpriority: string,
+  o_clerk: string, 
+  o_shippriority: int32, 
+  o_comment: string
+}
+
+create type CustomerType as closed {
+  c_custkey: int32, 
+  c_name: string, 
+  c_address: string, 
+  c_nationkey: int32, 
+  c_phone: string, 
+  c_acctbal: double, 
+  c_mktsegment: string,
+  c_comment: string
+}
+
+create type SupplierType as closed {
+  s_suppkey: int32, 
+  s_name: string,
+  s_address: string,
+  s_nationkey: int32,
+  s_phone: string,
+  s_acctbal: double,
+  s_comment: string
+}
+
+create type NationType as closed {
+  n_nationkey: int32,
+  n_name: string,
+  n_regionkey: int32,
+  n_comment: string
+}
+
+create type RegionType as closed {
+  r_regionkey: int32,
+  r_name: string,
+  r_comment: string
+}
+
+create type PartType as closed {
+  p_partkey: int32, 
+  p_name: string, 
+  p_mfgr: string,
+  p_brand: string,
+  p_type: string,
+  p_size: int32,
+  p_container: string,
+  p_retailprice: double,
+  p_comment: string
+}
+
+create dataset LineItem(LineItemType)
+  primary key l_orderkey, l_linenumber;
+create dataset Orders(OrderType)
+  primary key o_orderkey;
+create dataset Customer(CustomerType) 
+  primary key c_custkey;
+create dataset Supplier(SupplierType)
+  primary key s_suppkey;
+create dataset Nation(NationType) 
+  primary key n_nationkey;
+create dataset Region(RegionType)
+  primary key r_regionkey;
+create dataset Part(PartType)
+  primary key p_partkey;
+
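+/*
+ * Optimizer test: nested-join formulation modeled on the join pipeline of TPC-H Q8
+ * ("national market share").
+ */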
+for $s in dataset("Supplier")
+    for $lnrcop in (
+      for $lnrco in (
+        for $l in dataset('LineItem')
+        for $nrco in (
+          for $o in dataset('Orders')
+          for $nrc in (
+            for $c in dataset('Customer')
+            for $nr in (
+              for $n1 in dataset('Nation')
+              for $r1 in dataset('Region')
+              where $n1.n_regionkey = $r1.r_regionkey and $r1.r_name = 'AMERICA'
+              return { "n_nationkey": $n1.n_nationkey }
+            )
+            where $c.c_nationkey = $nr.n_nationkey
+            return { "c_custkey": $c.c_custkey }
+          )
+          where $nrc.c_custkey = $o.o_custkey
+          return {
+            "o_orderdate" : $o.o_orderdate, 
+            "o_orderkey": $o.o_orderkey 
+          }
+        )
+        where $l.l_orderkey = $nrco.o_orderkey
+          and $nrco.o_orderdate >= '1995-01-01' 
+          and $nrco.o_orderdate <= '1996-12-31'
+        return {
+          "o_orderdate": $nrco.o_orderdate, 
+          "l_partkey": $l.l_partkey, 
+          "l_discount": $l.l_discount, 
+          "l_extendedprice": $l.l_extendedprice, 
+          "l_suppkey": $l.l_suppkey
+        }
+      )
+      for $p in dataset('Part')
+      where $p.p_partkey = $lnrco.l_partkey and $p.p_type = 'ECONOMY ANODIZED STEEL'
+      return {
+        "o_orderdate": $lnrco.o_orderdate, 
+        "l_discount": $lnrco.l_discount, 
+        "l_extendedprice": $lnrco.l_extendedprice, 
+        "l_suppkey": $lnrco.l_suppkey 
+      }
+    )
+    where $s.s_suppkey = $lnrcop.l_suppkey
+    return {
+      "o_orderdate": $lnrcop.o_orderdate, 
+      "l_discount": $lnrcop.l_discount, 
+      "l_extendedprice": $lnrcop.l_extendedprice, 
+      "l_suppkey": $lnrcop.l_suppkey, 
+      "s_nationkey": $s.s_nationkey
+    }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/optimizerts/queries/q09_group_by.aql b/asterix-app/src/test/resources/optimizerts/queries/q09_group_by.aql
new file mode 100644
index 0000000..3c9dc47
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/q09_group_by.aql
@@ -0,0 +1,153 @@
+drop dataverse q09_group_by if exists;
+
+create dataverse q09_group_by;
+
+use dataverse q09_group_by;
+
+create type LineItemType as closed {
+  l_orderkey: int32, 
+  l_partkey: int32, 
+  l_suppkey: int32, 
+  l_linenumber: int32, 
+  l_quantity: int32, 
+  l_extendedprice: double,
+  l_discount: double, 
+  l_tax: double,
+  l_returnflag: string, 
+  l_linestatus: string, 
+  l_shipdate: string,
+  l_commitdate: string, 
+  l_receiptdate: string, 
+  l_shipinstruct: string, 
+  l_shipmode: string, 
+  l_comment: string
+}
+
+create type OrderType as closed {
+  o_orderkey: int32, 
+  o_custkey: int32, 
+  o_orderstatus: string, 
+  o_totalprice: double, 
+  o_orderdate: string, 
+  o_orderpriority: string,
+  o_clerk: string, 
+  o_shippriority: int32, 
+  o_comment: string
+}
+
+create type CustomerType as closed {
+  c_custkey: int32, 
+  c_name: string, 
+  c_address: string, 
+  c_nationkey: int32, 
+  c_phone: string, 
+  c_acctbal: double, 
+  c_mktsegment: string,
+  c_comment: string
+}
+
+create type SupplierType as closed {
+  s_suppkey: int32, 
+  s_name: string,
+  s_address: string,
+  s_nationkey: int32,
+  s_phone: string,
+  s_acctbal: double,
+  s_comment: string
+}
+
+create type NationType as closed {
+  n_nationkey: int32,
+  n_name: string,
+  n_regionkey: int32,
+  n_comment: string
+}
+
+create type RegionType as closed {
+  r_regionkey: int32,
+  r_name: string,
+  r_comment: string
+}
+
+create type PartType as closed {
+  p_partkey: int32, 
+  p_name: string, 
+  p_mfgr: string,
+  p_brand: string,
+  p_type: string,
+  p_size: int32,
+  p_container: string,
+  p_retailprice: double,
+  p_comment: string
+}
+
+create type PartSuppType as closed {
+  ps_partkey: int32, 
+  ps_suppkey: int32,
+  ps_availqty: int32,
+  ps_supplycost: double,
+  ps_comment: string 
+}
+
+create dataset LineItem(LineItemType)
+  primary key l_orderkey, l_linenumber;
+create dataset Orders(OrderType)
+  primary key o_orderkey;
+create dataset Supplier(SupplierType)
+  primary key s_suppkey;
+create dataset Region(RegionType) 
+  primary key r_regionkey;
+create dataset Nation(NationType) 
+  primary key n_nationkey;
+create dataset Part(PartType)
+  primary key p_partkey;
+create dataset Partsupp(PartSuppType)
+  primary key ps_partkey, ps_suppkey;  
+create dataset Customer(CustomerType) 
+  primary key c_custkey;
+
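+/*
+ * Optimizer test: nested-join formulation modeled on the join pipeline of TPC-H Q9
+ * ("product type profit measure", parts whose name contains 'green').
+ */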
+for $p in dataset('Part')
+    for $l2 in (
+      for $ps in dataset('Partsupp')
+      for $l1 in (
+        for $s1 in (
+          for $s in dataset('Supplier')
+          for $n in dataset('Nation')
+          where $n.n_nationkey = $s.s_nationkey
+          return {
+            "s_suppkey": $s.s_suppkey,
+            "n_name": $n.n_name
+          }
+        )
+        for $l in dataset('LineItem')
+        where $s1.s_suppkey = $l.l_suppkey       
+        return  {
+          "l_suppkey": $l.l_suppkey,
+          "l_extendedprice": $l.l_extendedprice,
+          "l_discount": $l.l_discount,
+          "l_quantity": $l.l_quantity,
+          "l_partkey": $l.l_partkey,
+          "l_orderkey": $l.l_orderkey,
+          "n_name": $s1.n_name
+        }
+      )
+      where $ps.ps_suppkey = $l1.l_suppkey and $ps.ps_partkey = $l1.l_partkey       
+      return {
+        "l_extendedprice": $l1.l_extendedprice,
+        "l_discount": $l1.l_discount,
+        "l_quantity": $l1.l_quantity,
+        "l_partkey": $l1.l_partkey,
+        "l_orderkey": $l1.l_orderkey,
+        "n_name": $l1.n_name,
+        "ps_supplycost": $ps.ps_supplycost
+      }
+    )
+    where contains($p.p_name, 'green') and $p.p_partkey = $l2.l_partkey    
+    return {
+      "l_extendedprice": $l2.l_extendedprice,
+      "l_discount": $l2.l_discount,
+      "l_quantity": $l2.l_quantity,
+      "l_orderkey": $l2.l_orderkey,
+      "n_name": $l2.n_name,
+      "ps_supplycost": $l2.ps_supplycost
+    }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/optimizerts/results/q08_group_by.plan b/asterix-app/src/test/resources/optimizerts/results/q08_group_by.plan
new file mode 100644
index 0000000..fefdfcb
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/q08_group_by.plan
@@ -0,0 +1,78 @@
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    -- STREAM_PROJECT  |PARTITIONED|
+      -- ASSIGN  |PARTITIONED|
+        -- STREAM_PROJECT  |PARTITIONED|
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            -- HYBRID_HASH_JOIN [$$78][$$104]  |PARTITIONED|
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                -- STREAM_PROJECT  |PARTITIONED|
+                  -- ASSIGN  |PARTITIONED|
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      -- DATASOURCE_SCAN  |PARTITIONED|
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+              -- HASH_PARTITION_EXCHANGE [$$104]  |PARTITIONED|
+                -- STREAM_PROJECT  |PARTITIONED|
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    -- HYBRID_HASH_JOIN [$$101][$$85]  |PARTITIONED|
+                      -- HASH_PARTITION_EXCHANGE [$$101]  |PARTITIONED|
+                        -- STREAM_PROJECT  |PARTITIONED|
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            -- HYBRID_HASH_JOIN [$$79][$$81]  |PARTITIONED|
+                              -- HASH_PARTITION_EXCHANGE [$$79]  |PARTITIONED|
+                                -- STREAM_PROJECT  |PARTITIONED|
+                                  -- ASSIGN  |PARTITIONED|
+                                    -- STREAM_PROJECT  |PARTITIONED|
+                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                              -- HASH_PARTITION_EXCHANGE [$$81]  |PARTITIONED|
+                                -- STREAM_PROJECT  |PARTITIONED|
+                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                    -- HYBRID_HASH_JOIN [$$96][$$82]  |PARTITIONED|
+                                      -- HASH_PARTITION_EXCHANGE [$$96]  |PARTITIONED|
+                                        -- STREAM_SELECT  |PARTITIONED|
+                                          -- STREAM_PROJECT  |PARTITIONED|
+                                            -- ASSIGN  |PARTITIONED|
+                                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                -- DATASOURCE_SCAN  |PARTITIONED|
+                                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                      -- HASH_PARTITION_EXCHANGE [$$82]  |PARTITIONED|
+                                        -- STREAM_PROJECT  |PARTITIONED|
+                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                            -- HYBRID_HASH_JOIN [$$92][$$83]  |PARTITIONED|
+                                              -- HASH_PARTITION_EXCHANGE [$$92]  |PARTITIONED|
+                                                -- STREAM_PROJECT  |PARTITIONED|
+                                                  -- ASSIGN  |PARTITIONED|
+                                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                      -- DATASOURCE_SCAN  |PARTITIONED|
+                                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                              -- HASH_PARTITION_EXCHANGE [$$83]  |PARTITIONED|
+                                                -- STREAM_PROJECT  |PARTITIONED|
+                                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                    -- HYBRID_HASH_JOIN [$$89][$$84]  |PARTITIONED|
+                                                      -- HASH_PARTITION_EXCHANGE [$$89]  |PARTITIONED|
+                                                        -- STREAM_PROJECT  |PARTITIONED|
+                                                          -- ASSIGN  |PARTITIONED|
+                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                        -- STREAM_PROJECT  |PARTITIONED|
+                                                          -- STREAM_SELECT  |PARTITIONED|
+                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        -- STREAM_PROJECT  |PARTITIONED|
+                          -- STREAM_SELECT  |PARTITIONED|
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/q09_group_by.plan b/asterix-app/src/test/resources/optimizerts/results/q09_group_by.plan
new file mode 100644
index 0000000..8a08e39
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/q09_group_by.plan
@@ -0,0 +1,55 @@
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    -- STREAM_PROJECT  |PARTITIONED|
+      -- ASSIGN  |PARTITIONED|
+        -- STREAM_PROJECT  |PARTITIONED|
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            -- HYBRID_HASH_JOIN [$$62][$$80]  |PARTITIONED|
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                -- STREAM_PROJECT  |PARTITIONED|
+                  -- STREAM_SELECT  |PARTITIONED|
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      -- DATASOURCE_SCAN  |PARTITIONED|
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+              -- HASH_PARTITION_EXCHANGE [$$80]  |PARTITIONED|
+                -- STREAM_PROJECT  |PARTITIONED|
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    -- HYBRID_HASH_JOIN [$$64, $$63][$$69, $$80]  |PARTITIONED|
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        -- STREAM_PROJECT  |PARTITIONED|
+                          -- ASSIGN  |PARTITIONED|
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                      -- HASH_PARTITION_EXCHANGE [$$80, $$69]  |PARTITIONED|
+                        -- STREAM_PROJECT  |PARTITIONED|
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            -- HYBRID_HASH_JOIN [$$65][$$69]  |PARTITIONED|
+                              -- HASH_PARTITION_EXCHANGE [$$65]  |PARTITIONED|
+                                -- STREAM_PROJECT  |PARTITIONED|
+                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                    -- HYBRID_HASH_JOIN [$$73][$$66]  |PARTITIONED|
+                                      -- HASH_PARTITION_EXCHANGE [$$73]  |PARTITIONED|
+                                        -- STREAM_PROJECT  |PARTITIONED|
+                                          -- ASSIGN  |PARTITIONED|
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                        -- STREAM_PROJECT  |PARTITIONED|
+                                          -- ASSIGN  |PARTITIONED|
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                              -- HASH_PARTITION_EXCHANGE [$$69]  |PARTITIONED|
+                                -- STREAM_PROJECT  |PARTITIONED|
+                                  -- ASSIGN  |PARTITIONED|
+                                    -- STREAM_PROJECT  |PARTITIONED|
+                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.3.query.aql
index eee12d3..38f292b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.3.query.aql
@@ -5,6 +5,6 @@
  * Date         : 22th May 2013
  */
 
-for $a in [ {"f" : 19, "g": 1} , {"f" : 12, "g": 4} , {"f" : 10, "g": 1} , {"f" : 17, "g": 1}, {"f" : 12, "g": 4} ]
+for $a in [ {"f" : 19, "g": 1} , {"f" : 12, "g": 2} , {"f" : 10, "g": 1} , {"f" : 17, "g": 1}, {"f" : 12, "g": 4} ]
 distinct by $a.f
 return $a
diff --git a/asterix-app/src/test/resources/runtimets/queries/temporal/day_of_week_01/day_of_week_01.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/temporal/day_of_week_01/day_of_week_01.1.ddl.aql
new file mode 100644
index 0000000..f92bdd4
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temporal/day_of_week_01/day_of_week_01.1.ddl.aql
@@ -0,0 +1,7 @@
+/**
+ * day-of-week test case: test the day-of-week function
+ * Expected result: success
+ **/
+
+drop dataverse test if exists;
+create dataverse test;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temporal/day_of_week_01/day_of_week_01.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/temporal/day_of_week_01/day_of_week_01.2.update.aql
new file mode 100644
index 0000000..cf8a1ae
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temporal/day_of_week_01/day_of_week_01.2.update.aql
@@ -0,0 +1,4 @@
+/**
+ * day-of-week test case: test the day-of-week function
+ * Expected result: success
+ **/
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temporal/day_of_week_01/day_of_week_01.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/temporal/day_of_week_01/day_of_week_01.3.query.aql
new file mode 100644
index 0000000..fc1e705
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temporal/day_of_week_01/day_of_week_01.3.query.aql
@@ -0,0 +1,12 @@
+/**
+ * day-of-week test case: test the day-of-week function
+ * Expected result: success
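+ * Result numbering: day-of-week returns 1 for Monday through 7 for Sunday.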
+ **/
+
+use dataverse test;
+
+let $d1 := date("2013-08-06")
+let $d2 := date("-2013-08-06")
+let $dt1 := datetime("1913-08-06T15:53:28Z")
+let $dt2 := datetime("-1913-08-10T15:53:28Z")
+return { "1970-01-01": day-of-week(date("1970-01-01")), "2013-08-06": day-of-week($d1), "-2013-08-06": day-of-week($d2), "1913-08-06T15:53:28Z": day-of-week($dt1), "-1913-08-10T15:53:28Z": day-of-week($dt2), "null": day-of-week(null) }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temporal/day_of_week_01/day_of_week_01.1.adm b/asterix-app/src/test/resources/runtimets/results/temporal/day_of_week_01/day_of_week_01.1.adm
new file mode 100644
index 0000000..dff4aa2
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temporal/day_of_week_01/day_of_week_01.1.adm
@@ -0,0 +1 @@
+{ "1970-01-01": 4, "2013-08-06": 2, "-2013-08-06": 4, "1913-08-06T15:53:28Z": 3, "-1913-08-10T15:53:28Z": 7, "null": null }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterix-app/src/test/resources/runtimets/testsuite.xml
index 3ec387c..fec809e 100644
--- a/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -4370,7 +4370,12 @@
     </test-case>
   </test-group>
   <test-group name="temporal">
-  <test-case FilePath="temporal">
+    <test-case FilePath="temporal">
+     <compilation-unit name="day_of_week_01">
+        <output-dir compare="Text">day_of_week_01</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="temporal">
      <compilation-unit name="interval_bin">
         <output-dir compare="Text">interval_bin</output-dir>
       </compilation-unit>
diff --git a/asterix-aql/pom.xml b/asterix-aql/pom.xml
index da13747..465c3b9 100644
--- a/asterix-aql/pom.xml
+++ b/asterix-aql/pom.xml
@@ -139,6 +139,11 @@
 			<version>0.0.3</version>
 			<scope>compile</scope>
 		</dependency>
+		<dependency>
+			<groupId>xerces</groupId>
+			<artifactId>xerces</artifactId>
+			<version>2.4.0</version>
+		</dependency>
 	</dependencies>
 
 </project>
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
index 5549c02..b980337 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
@@ -74,7 +74,6 @@
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
 import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
 import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
@@ -83,6 +82,7 @@
 import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
 import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
 import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
@@ -147,7 +147,7 @@
             DataverseTupleTranslator tupleReaderWriter = new DataverseTupleTranslator(true);
             ITupleReference tuple = tupleReaderWriter.getTupleFromMetadataEntity(dataverse);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, tuple);
-        } catch (BTreeDuplicateKeyException e) {
+        } catch (TreeIndexDuplicateKeyException e) {
             throw new MetadataException("A dataverse with this name " + dataverse.getDataverseName()
                     + " already exists.", e);
         } catch (Exception e) {
@@ -177,7 +177,7 @@
             ITupleReference dataTypeTuple = createTuple(dataset.getDataverseName(), dataset.getItemTypeName(),
                     dataset.getDatasetName());
             insertTupleIntoIndex(jobId, MetadataSecondaryIndexes.DATATYPENAME_ON_DATASET_INDEX, dataTypeTuple);
-        } catch (BTreeDuplicateKeyException e) {
+        } catch (TreeIndexDuplicateKeyException e) {
             throw new MetadataException("A dataset with this name " + dataset.getDatasetName()
                     + " already exists in dataverse '" + dataset.getDataverseName() + "'.", e);
         } catch (Exception e) {
@@ -191,7 +191,7 @@
             IndexTupleTranslator tupleWriter = new IndexTupleTranslator(true);
             ITupleReference tuple = tupleWriter.getTupleFromMetadataEntity(index);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.INDEX_DATASET, tuple);
-        } catch (BTreeDuplicateKeyException e) {
+        } catch (TreeIndexDuplicateKeyException e) {
             throw new MetadataException("An index with name '" + index.getIndexName() + "' already exists.", e);
         } catch (Exception e) {
             throw new MetadataException(e);
@@ -204,7 +204,7 @@
             NodeTupleTranslator tupleReaderWriter = new NodeTupleTranslator(true);
             ITupleReference tuple = tupleReaderWriter.getTupleFromMetadataEntity(node);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.NODE_DATASET, tuple);
-        } catch (BTreeDuplicateKeyException e) {
+        } catch (TreeIndexDuplicateKeyException e) {
             throw new MetadataException("A node with name '" + node.getNodeName() + "' already exists.", e);
         } catch (Exception e) {
             throw new MetadataException(e);
@@ -217,7 +217,7 @@
             NodeGroupTupleTranslator tupleReaderWriter = new NodeGroupTupleTranslator(true);
             ITupleReference tuple = tupleReaderWriter.getTupleFromMetadataEntity(nodeGroup);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.NODEGROUP_DATASET, tuple);
-        } catch (BTreeDuplicateKeyException e) {
+        } catch (TreeIndexDuplicateKeyException e) {
             throw new MetadataException("A nodegroup with name '" + nodeGroup.getNodeGroupName() + "' already exists.",
                     e);
         } catch (Exception e) {
@@ -231,7 +231,7 @@
             DatatypeTupleTranslator tupleReaderWriter = new DatatypeTupleTranslator(jobId, this, true);
             ITupleReference tuple = tupleReaderWriter.getTupleFromMetadataEntity(datatype);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.DATATYPE_DATASET, tuple);
-        } catch (BTreeDuplicateKeyException e) {
+        } catch (TreeIndexDuplicateKeyException e) {
             throw new MetadataException("A datatype with name '" + datatype.getDatatypeName() + "' already exists.", e);
         } catch (Exception e) {
             throw new MetadataException(e);
@@ -246,7 +246,7 @@
             ITupleReference functionTuple = tupleReaderWriter.getTupleFromMetadataEntity(function);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.FUNCTION_DATASET, functionTuple);
 
-        } catch (BTreeDuplicateKeyException e) {
+        } catch (TreeIndexDuplicateKeyException e) {
             throw new MetadataException("A function with this name " + function.getName() + " and arity "
                     + function.getArity() + " already exists in dataverse '" + function.getDataverseName() + "'.", e);
         } catch (Exception e) {
@@ -1057,7 +1057,7 @@
             ITupleReference adapterTuple = tupleReaderWriter.getTupleFromMetadataEntity(adapter);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, adapterTuple);
 
-        } catch (BTreeDuplicateKeyException e) {
+        } catch (TreeIndexDuplicateKeyException e) {
             throw new MetadataException("A adapter with this name " + adapter.getAdapterIdentifier().getAdapterName()
                     + " already exists in dataverse '" + adapter.getAdapterIdentifier().getNamespace() + "'.", e);
         } catch (Exception e) {
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
index 6c55f12..541d703 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
@@ -55,7 +55,7 @@
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
+import edu.uci.ics.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
 
 /**
  * Translates a Datatype metadata entity to an ITupleReference and vice versa.
@@ -437,7 +437,7 @@
             mn.insertIntoDatatypeSecondaryIndex(jobId, topLevelType.getDataverseName(), typeName,
                     topLevelType.getDatatypeName());
 
-        } catch (BTreeDuplicateKeyException e) {
+        } catch (TreeIndexDuplicateKeyException e) {
             // The key may have been inserted by a previous DDL statement or by
             // a previous nested type.
         }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
index 43c43a3..41cf3c1 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
@@ -540,6 +540,8 @@
             FunctionConstants.ASTERIX_NS, "adjust-time-for-timezone", 2);
     public final static FunctionIdentifier ADJUST_DATETIME_FOR_TIMEZONE = new FunctionIdentifier(
             FunctionConstants.ASTERIX_NS, "adjust-datetime-for-timezone", 2);
+    public final static FunctionIdentifier DAY_OF_WEEK = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "day-of-week");
 
     public final static FunctionIdentifier GET_POINT_X_COORDINATE_ACCESSOR = new FunctionIdentifier(
             FunctionConstants.ASTERIX_NS, "get-x", 1);
@@ -855,6 +857,7 @@
         addFunction(GET_DAY_TIME_DURATION, OptionalADayTimeDurationTypeComputer.INSTANCE);
         addFunction(GET_YEAR_MONTH_DURATION, OptionalAYearMonthDurationTypeComputer.INSTANCE);
         addFunction(INTERVAL_BIN, OptionalAIntervalTypeComputer.INSTANCE);
+        addFunction(DAY_OF_WEEK, OptionalAInt32TypeComputer.INSTANCE);
 
         // interval constructors
         addFunction(INTERVAL_CONSTRUCTOR_DATE, OptionalAIntervalTypeComputer.INSTANCE);
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DayOfWeekDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DayOfWeekDescriptor.java
new file mode 100644
index 0000000..11ba264
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DayOfWeekDescriptor.java
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class DayOfWeekDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = AsterixBuiltinFunctions.DAY_OF_WEEK;
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+    private final static byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
+
+    // Fixed week day anchor: Thursday, 1 January 1970
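+    // (e.g. 2013-08-06 is 15923 days after the anchor, and (15923 + 4) % 7 == 2, i.e. Tuesday)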
+    private final static int ANCHOR_WEEKDAY = 4;
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new DayOfWeekDescriptor();
+        }
+
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+                    // possible returning types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.AINT32);
+                    private AMutableInt32 aInt32 = new AMutableInt32(0);
+
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut.reset();
+                        eval.evaluate(tuple);
+                        try {
+                            if (argOut.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                            } else {
+                                int daysSinceAnchor;
+                                int remainder = 0;
+                                if (argOut.getByteArray()[0] == SER_DATETIME_TYPE_TAG) {
+                                    daysSinceAnchor = (int) (ADateTimeSerializerDeserializer.getChronon(
+                                            argOut.getByteArray(), 1) / GregorianCalendarSystem.CHRONON_OF_DAY);
+                                    remainder = (int) (ADateTimeSerializerDeserializer.getChronon(argOut.getByteArray(),
+                                            1) % GregorianCalendarSystem.CHRONON_OF_DAY);
+                                } else if (argOut.getByteArray()[0] == SER_DATE_TYPE_TAG) {
+                                    daysSinceAnchor = ADateSerializerDeserializer.getChronon(argOut.getByteArray(), 1);
+                                } else {
+                                    throw new AlgebricksException(
+                                            FID.getName()
+                                                    + ": expects input type DATETIME/DATE/NULL but got "
+                                                    + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut
+                                                            .getByteArray()[0]));
+                                }
+
+                                // adjust days before 1970-01-01: the division above truncates toward zero,
+                                // so a partial day before the anchor must still count as the previous day
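+                                // (e.g. 1969-12-31T12:00:00Z gives daysSinceAnchor == 0 with a non-zero
+                                // remainder and is counted as day -1, a Wednesday)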
+                                if (daysSinceAnchor < 0 && remainder != 0) {
+                                    daysSinceAnchor -= 1;
+                                }
+
+                                // compute the weekday (0-based, and 0 = Sunday). Adjustment is needed as the anchor day is Thursday 
+                                int weekday = (daysSinceAnchor + ANCHOR_WEEKDAY) % 7;
+
+                                // handle the negative weekday
+                                if (weekday < 0) {
+                                    weekday += 7;
+                                }
+
+                                // remap Sunday from 0 to 7, giving 1 = Monday ... 7 = Sunday
+                                if (weekday == 0) {
+                                    weekday = 7;
+                                }
+
+                                aInt32.setValue(weekday);
+
+                                int32Serde.serialize(aInt32, out);
+                            }
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
index 47555bd..a13eed3 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
@@ -222,6 +222,7 @@
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DateFromUnixTimeInDaysDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DatetimeFromDateAndTimeDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DatetimeFromUnixTimeInMsDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DayOfWeekDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DayTimeDurationComparatorDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DurationEqualDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DurationFromMillisecondsDescriptor;
@@ -539,6 +540,7 @@
         temp.add(GetYearMonthDurationDescriptor.FACTORY);
         temp.add(GetDayTimeDurationDescriptor.FACTORY);
         temp.add(IntervalBinDescriptor.FACTORY);
+        temp.add(DayOfWeekDescriptor.FACTORY);
 
         // Interval constructor
         temp.add(AIntervalFromDateConstructorDescriptor.FACTORY);