move hivesterix codebase into hyracks fullstack
git-svn-id: https://hyracks.googlecode.com/svn/branches/fullstack_staging@2420 123451ca-8445-de46-9d55-352943316053
diff --git a/src/test/resources/optimizerts/results/q13_customer_distribution.plan b/src/test/resources/optimizerts/results/q13_customer_distribution.plan
new file mode 100644
index 0000000..19bcd24
--- /dev/null
+++ b/src/test/resources/optimizerts/results/q13_customer_distribution.plan
@@ -0,0 +1,80 @@
+write [%0->$$22, %0->$$23]
+-- SINK_WRITE |PARTITIONED|
+  project ([$$22, $$23])
+  -- STREAM_PROJECT |PARTITIONED|
+    assign [$$22, $$23] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$20], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$21]]
+    -- ASSIGN |PARTITIONED|
+      exchange
+      -- SORT_MERGE_EXCHANGE [$$21(DESC), $$20(DESC) ] |PARTITIONED|
+        order (DESC, %0->$$21) (DESC, %0->$$20)
+        -- STABLE_SORT [$$21(DESC), $$20(DESC)] |LOCAL|
+          exchange
+          -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+            group by ([$$20 := %0->$$28]) decor ([]) {
+                      aggregate [$$21] <- [function-call: hive:count(FINAL), Args:[%0->$$27]]
+                      -- AGGREGATE |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[$$28] |PARTITIONED|
+              exchange
+              -- HASH_PARTITION_EXCHANGE [$$28] |PARTITIONED|
+                group by ([$$28 := %0->$$19]) decor ([]) {
+                          aggregate [$$27] <- [function-call: hive:count(PARTIAL1), Args:[1]]
+                          -- AGGREGATE |LOCAL|
+                            nested tuple source
+                            -- NESTED_TUPLE_SOURCE |LOCAL|
+                       }
+                -- EXTERNAL_GROUP_BY[$$19] |LOCAL|
+                  exchange
+                  -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+                    project ([$$19])
+                    -- STREAM_PROJECT |PARTITIONED|
+                      exchange
+                      -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+                        group by ([$$18 := %0->$$26]) decor ([]) {
+                                  aggregate [$$19] <- [function-call: hive:count(FINAL), Args:[%0->$$25]]
+                                  -- AGGREGATE |LOCAL|
+                                    nested tuple source
+                                    -- NESTED_TUPLE_SOURCE |LOCAL|
+                               }
+                        -- EXTERNAL_GROUP_BY[$$26] |PARTITIONED|
+                          exchange
+                          -- HASH_PARTITION_EXCHANGE [$$26] |PARTITIONED|
+                            group by ([$$26 := %0->$$10]) decor ([]) {
+                                      aggregate [$$25] <- [function-call: hive:count(PARTIAL1), Args:[%0->$$1]]
+                                      -- AGGREGATE |LOCAL|
+                                        nested tuple source
+                                        -- NESTED_TUPLE_SOURCE |LOCAL|
+                                   }
+                            -- EXTERNAL_GROUP_BY[$$10] |LOCAL|
+                              exchange
+                              -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+                                project ([$$10, $$1])
+                                -- STREAM_PROJECT |PARTITIONED|
+                                  exchange
+                                  -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+                                    left outer join (function-call: algebricks:eq, Args:[%0->$$2, %0->$$10])
+                                    -- HYBRID_HASH_JOIN [$$10][$$2] |PARTITIONED|
+                                      exchange
+                                      -- HASH_PARTITION_EXCHANGE [$$10] |PARTITIONED|
+                                        data-scan [$$10]<-[$$10, $$11, $$12, $$13, $$14, $$15, $$16, $$17] <- default.customer
+                                        -- DATASOURCE_SCAN |PARTITIONED|
+                                          exchange
+                                          -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+                                            empty-tuple-source
+                                            -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+                                      exchange
+                                      -- HASH_PARTITION_EXCHANGE [$$2] |PARTITIONED|
+                                        project ([$$2, $$1])
+                                        -- STREAM_PROJECT |PARTITIONED|
+                                          select (function-call: algebricks:not, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFLike, Args:[%0->$$9, %special%requests%]])
+                                          -- STREAM_SELECT |PARTITIONED|
+                                            exchange
+                                            -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+                                              data-scan [$$1, $$2, $$9]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.orders
+                                              -- DATASOURCE_SCAN |PARTITIONED|
+                                                exchange
+                                                -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+                                                  empty-tuple-source
+                                                  -- EMPTY_TUPLE_SOURCE |PARTITIONED|
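
Note: the expected plan above corresponds to TPC-H Q13 (the customer-distribution query). For reference, a sketch of the standard Q13 SQL is given below; the exact HiveQL text used by this optimizer test is not part of this hunk and may differ in staging and table naming, but the plan's shape follows it: a left outer join of default.customer with default.orders, a pushed-down NOT LIKE '%special%requests%' filter on o_comment, a per-customer count of orders, a count of customers per order-count, and a descending sort on both outputs.

    select c_count, count(*) as custdist
    from (
        select c_custkey, count(o_orderkey) as c_count
        from customer left outer join orders
          on c_custkey = o_custkey
         and o_comment not like '%special%requests%'
        group by c_custkey
    ) c_orders
    group by c_count
    order by custdist desc, c_count desc;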