Posted to commits@hive.apache.org by xu...@apache.org on 2014/10/24 21:19:08 UTC

svn commit: r1634122 [3/6] - in /hive/branches/spark: itests/src/test/resources/ ql/src/test/results/clientpositive/spark/

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby10.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby10.q.out?rev=1634122&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby10.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby10.q.out Fri Oct 24 19:19:08 2014
@@ -0,0 +1,680 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE dest1(key INT, val1 INT, val2 INT)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE dest1(key INT, val1 INT, val2 INT)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest1
+PREHOOK: query: CREATE TABLE dest2(key INT, val1 INT, val2 INT)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest2
+POSTHOOK: query: CREATE TABLE dest2(key INT, val1 INT, val2 INT)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest2
+PREHOOK: query: CREATE TABLE INPUT(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@INPUT
+POSTHOOK: query: CREATE TABLE INPUT(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@INPUT
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv5.txt' INTO TABLE INPUT
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@input
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv5.txt' INTO TABLE INPUT
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@input
+PREHOOK: query: EXPLAIN
+FROM INPUT 
+INSERT OVERWRITE TABLE dest1 SELECT INPUT.key, count(substr(INPUT.value,5)), count(distinct substr(INPUT.value,5)) GROUP BY INPUT.key
+INSERT OVERWRITE TABLE dest2 SELECT INPUT.key, sum(substr(INPUT.value,5)), sum(distinct substr(INPUT.value,5))   GROUP BY INPUT.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM INPUT 
+INSERT OVERWRITE TABLE dest1 SELECT INPUT.key, count(substr(INPUT.value,5)), count(distinct substr(INPUT.value,5)) GROUP BY INPUT.key
+INSERT OVERWRITE TABLE dest2 SELECT INPUT.key, sum(substr(INPUT.value,5)), sum(distinct substr(INPUT.value,5))   GROUP BY INPUT.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-3 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-3
+  Stage-4 depends on stages: Stage-0
+  Stage-1 depends on stages: Stage-3
+  Stage-5 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP SORT, 1)
+        Reducer 3 <- Reducer 2 (GROUP, 1)
+        Reducer 4 <- Reducer 2 (GROUP, 1)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: input
+                  Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: substr(value, 5) (type: string)
+                    sort order: +
+                    Map-reduce partition columns: substr(value, 5) (type: string)
+                    Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: key (type: int)
+        Reducer 2 
+            Reduce Operator Tree:
+              Forward
+                Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: count(KEY._col0), count(DISTINCT KEY._col0)
+                  keys: VALUE._col0 (type: int)
+                  mode: hash
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: int)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: int)
+                    Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col1 (type: bigint), _col2 (type: bigint)
+                Group By Operator
+                  aggregations: sum(KEY._col0), sum(DISTINCT KEY._col0)
+                  keys: VALUE._col0 (type: int)
+                  mode: hash
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: int)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: int)
+                    Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col1 (type: double), _col2 (type: double)
+        Reducer 3 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0), count(VALUE._col1)
+                keys: KEY._col0 (type: int)
+                mode: final
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 1 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: int), UDFToInteger(_col1) (type: int), UDFToInteger(_col2) (type: int)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 1 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.dest1
+        Reducer 4 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0), sum(VALUE._col1)
+                keys: KEY._col0 (type: int)
+                mode: final
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 1 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: int), UDFToInteger(_col1) (type: int), UDFToInteger(_col2) (type: int)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 1 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.dest2
+
+  Stage: Stage-3
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1
+
+  Stage: Stage-4
+    Stats-Aggr Operator
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest2
+
+  Stage: Stage-5
+    Stats-Aggr Operator
+
+PREHOOK: query: FROM INPUT
+INSERT OVERWRITE TABLE dest1 SELECT INPUT.key, count(substr(INPUT.value,5)), count(distinct substr(INPUT.value,5)) GROUP BY INPUT.key
+INSERT OVERWRITE TABLE dest2 SELECT INPUT.key, sum(substr(INPUT.value,5)), sum(distinct substr(INPUT.value,5))   GROUP BY INPUT.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@input
+PREHOOK: Output: default@dest1
+PREHOOK: Output: default@dest2
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM INPUT
+INSERT OVERWRITE TABLE dest1 SELECT INPUT.key, count(substr(INPUT.value,5)), count(distinct substr(INPUT.value,5)) GROUP BY INPUT.key
+INSERT OVERWRITE TABLE dest2 SELECT INPUT.key, sum(substr(INPUT.value,5)), sum(distinct substr(INPUT.value,5))   GROUP BY INPUT.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@input
+POSTHOOK: Output: default@dest1
+POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest1.key SIMPLE [(input)input.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: dest1.val1 EXPRESSION [(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.val2 EXPRESSION [(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.key SIMPLE [(input)input.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: dest2.val1 EXPRESSION [(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.val2 EXPRESSION [(input)input.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: SELECT * from dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * from dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+128	1	1
+150	1	1
+165	1	1
+193	1	1
+213	3	2
+224	1	1
+238	3	3
+255	1	1
+265	1	1
+27	1	1
+273	1	1
+278	1	1
+311	1	1
+369	1	1
+401	1	1
+409	1	1
+484	1	1
+66	1	1
+86	1	1
+98	1	1
+PREHOOK: query: SELECT * from dest2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * from dest2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest2
+#### A masked pattern was here ####
+128	128	128
+150	150	150
+165	165	165
+193	193	193
+213	640	427
+224	224	224
+238	717	717
+255	255	255
+265	265	265
+27	27	27
+273	273	273
+278	278	278
+311	311	311
+369	369	369
+401	401	401
+409	409	409
+484	484	484
+66	66	66
+86	86	86
+98	98	98
+PREHOOK: query: EXPLAIN
+FROM INPUT 
+INSERT OVERWRITE TABLE dest1 SELECT INPUT.key, count(substr(INPUT.value,5)), count(distinct substr(INPUT.value,5)) GROUP BY INPUT.key
+INSERT OVERWRITE TABLE dest2 SELECT INPUT.key, sum(substr(INPUT.value,5)), sum(distinct substr(INPUT.value,5))   GROUP BY INPUT.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM INPUT 
+INSERT OVERWRITE TABLE dest1 SELECT INPUT.key, count(substr(INPUT.value,5)), count(distinct substr(INPUT.value,5)) GROUP BY INPUT.key
+INSERT OVERWRITE TABLE dest2 SELECT INPUT.key, sum(substr(INPUT.value,5)), sum(distinct substr(INPUT.value,5))   GROUP BY INPUT.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-3 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-3
+  Stage-4 depends on stages: Stage-0
+  Stage-1 depends on stages: Stage-3
+  Stage-5 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP SORT, 1)
+        Reducer 3 <- Reducer 2 (GROUP, 1)
+        Reducer 4 <- Reducer 2 (GROUP, 1)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: input
+                  Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: substr(value, 5) (type: string)
+                    sort order: +
+                    Map-reduce partition columns: substr(value, 5) (type: string)
+                    Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: key (type: int)
+        Reducer 2 
+            Reduce Operator Tree:
+              Forward
+                Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: count(KEY._col0), count(DISTINCT KEY._col0)
+                  keys: VALUE._col0 (type: int)
+                  mode: hash
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: int)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: int)
+                    Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col1 (type: bigint), _col2 (type: bigint)
+                Group By Operator
+                  aggregations: sum(KEY._col0), sum(DISTINCT KEY._col0)
+                  keys: VALUE._col0 (type: int)
+                  mode: hash
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: int)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: int)
+                    Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col1 (type: double), _col2 (type: double)
+        Reducer 3 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0), count(VALUE._col1)
+                keys: KEY._col0 (type: int)
+                mode: final
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 1 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: int), UDFToInteger(_col1) (type: int), UDFToInteger(_col2) (type: int)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 1 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.dest1
+        Reducer 4 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0), sum(VALUE._col1)
+                keys: KEY._col0 (type: int)
+                mode: final
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 1 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: int), UDFToInteger(_col1) (type: int), UDFToInteger(_col2) (type: int)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 1 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.dest2
+
+  Stage: Stage-3
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1
+
+  Stage: Stage-4
+    Stats-Aggr Operator
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest2
+
+  Stage: Stage-5
+    Stats-Aggr Operator
+
+PREHOOK: query: FROM INPUT
+INSERT OVERWRITE TABLE dest1 SELECT INPUT.key, count(substr(INPUT.value,5)), count(distinct substr(INPUT.value,5)) GROUP BY INPUT.key
+INSERT OVERWRITE TABLE dest2 SELECT INPUT.key, sum(substr(INPUT.value,5)), sum(distinct substr(INPUT.value,5))   GROUP BY INPUT.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@input
+PREHOOK: Output: default@dest1
+PREHOOK: Output: default@dest2
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM INPUT
+INSERT OVERWRITE TABLE dest1 SELECT INPUT.key, count(substr(INPUT.value,5)), count(distinct substr(INPUT.value,5)) GROUP BY INPUT.key
+INSERT OVERWRITE TABLE dest2 SELECT INPUT.key, sum(substr(INPUT.value,5)), sum(distinct substr(INPUT.value,5))   GROUP BY INPUT.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@input
+POSTHOOK: Output: default@dest1
+POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest1.key SIMPLE [(input)input.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: dest1.val1 EXPRESSION [(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.val2 EXPRESSION [(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.key SIMPLE [(input)input.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: dest2.val1 EXPRESSION [(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.val2 EXPRESSION [(input)input.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: SELECT * from dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * from dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+128	1	1
+150	1	1
+165	1	1
+193	1	1
+213	3	2
+224	1	1
+238	3	3
+255	1	1
+265	1	1
+27	1	1
+273	1	1
+278	1	1
+311	1	1
+369	1	1
+401	1	1
+409	1	1
+484	1	1
+66	1	1
+86	1	1
+98	1	1
+PREHOOK: query: SELECT * from dest2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * from dest2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest2
+#### A masked pattern was here ####
+128	128	128
+150	150	150
+165	165	165
+193	193	193
+213	640	427
+224	224	224
+238	717	717
+255	255	255
+265	265	265
+27	27	27
+273	273	273
+278	278	278
+311	311	311
+369	369	369
+401	401	401
+409	409	409
+484	484	484
+66	66	66
+86	86	86
+98	98	98
+PREHOOK: query: -- HIVE-3852 Multi-groupby optimization fails when same distinct column is used twice or more
+EXPLAIN
+FROM INPUT
+INSERT OVERWRITE TABLE dest1 SELECT INPUT.key, sum(distinct substr(INPUT.value,5)), count(distinct substr(INPUT.value,5)) GROUP BY INPUT.key
+INSERT OVERWRITE TABLE dest2 SELECT INPUT.key, sum(distinct substr(INPUT.value,5)), avg(distinct substr(INPUT.value,5)) GROUP BY INPUT.key
+PREHOOK: type: QUERY
+POSTHOOK: query: -- HIVE-3852 Multi-groupby optimization fails when same distinct column is used twice or more
+EXPLAIN
+FROM INPUT
+INSERT OVERWRITE TABLE dest1 SELECT INPUT.key, sum(distinct substr(INPUT.value,5)), count(distinct substr(INPUT.value,5)) GROUP BY INPUT.key
+INSERT OVERWRITE TABLE dest2 SELECT INPUT.key, sum(distinct substr(INPUT.value,5)), avg(distinct substr(INPUT.value,5)) GROUP BY INPUT.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-3 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-3
+  Stage-4 depends on stages: Stage-0
+  Stage-1 depends on stages: Stage-3
+  Stage-5 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP SORT, 1)
+        Reducer 3 <- Reducer 2 (GROUP, 1)
+        Reducer 4 <- Reducer 2 (GROUP, 1)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: input
+                  Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: substr(value, 5) (type: string)
+                    sort order: +
+                    Map-reduce partition columns: substr(value, 5) (type: string)
+                    Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: key (type: int)
+        Reducer 2 
+            Reduce Operator Tree:
+              Forward
+                Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: sum(DISTINCT KEY._col0), count(DISTINCT KEY._col0)
+                  keys: VALUE._col0 (type: int)
+                  mode: hash
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: int)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: int)
+                    Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col1 (type: double), _col2 (type: bigint)
+                Group By Operator
+                  aggregations: sum(DISTINCT KEY._col0), avg(DISTINCT KEY._col0)
+                  keys: VALUE._col0 (type: int)
+                  mode: hash
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: int)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: int)
+                    Statistics: Num rows: 2 Data size: 280 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col1 (type: double), _col2 (type: struct<count:bigint,sum:double,input:string>)
+        Reducer 3 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0), count(VALUE._col1)
+                keys: KEY._col0 (type: int)
+                mode: final
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 1 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: int), UDFToInteger(_col1) (type: int), UDFToInteger(_col2) (type: int)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 1 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.dest1
+        Reducer 4 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0), avg(VALUE._col1)
+                keys: KEY._col0 (type: int)
+                mode: final
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 1 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: int), UDFToInteger(_col1) (type: int), UDFToInteger(_col2) (type: int)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 1 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.dest2
+
+  Stage: Stage-3
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1
+
+  Stage: Stage-4
+    Stats-Aggr Operator
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest2
+
+  Stage: Stage-5
+    Stats-Aggr Operator
+
+PREHOOK: query: FROM INPUT
+INSERT OVERWRITE TABLE dest1 SELECT INPUT.key, sum(distinct substr(INPUT.value,5)), count(distinct substr(INPUT.value,5)) GROUP BY INPUT.key
+INSERT OVERWRITE TABLE dest2 SELECT INPUT.key, sum(distinct substr(INPUT.value,5)), avg(distinct substr(INPUT.value,5)) GROUP BY INPUT.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@input
+PREHOOK: Output: default@dest1
+PREHOOK: Output: default@dest2
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM INPUT
+INSERT OVERWRITE TABLE dest1 SELECT INPUT.key, sum(distinct substr(INPUT.value,5)), count(distinct substr(INPUT.value,5)) GROUP BY INPUT.key
+INSERT OVERWRITE TABLE dest2 SELECT INPUT.key, sum(distinct substr(INPUT.value,5)), avg(distinct substr(INPUT.value,5)) GROUP BY INPUT.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@input
+POSTHOOK: Output: default@dest1
+POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest1.key SIMPLE [(input)input.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: dest1.val1 EXPRESSION [(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest1.val2 EXPRESSION [(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.key SIMPLE [(input)input.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: dest2.val1 EXPRESSION [(input)input.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: dest2.val2 EXPRESSION [(input)input.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: SELECT * from dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * from dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+128	128	1
+150	150	1
+165	165	1
+193	193	1
+213	427	2
+224	224	1
+238	717	3
+255	255	1
+265	265	1
+27	27	1
+273	273	1
+278	278	1
+311	311	1
+369	369	1
+401	401	1
+409	409	1
+484	484	1
+66	66	1
+86	86	1
+98	98	1
+PREHOOK: query: SELECT * from dest2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * from dest2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest2
+#### A masked pattern was here ####
+128	128	128
+150	150	150
+165	165	165
+193	193	193
+213	427	213
+224	224	224
+238	717	239
+255	255	255
+265	265	265
+27	27	27
+273	273	273
+278	278	278
+311	311	311
+369	369	369
+401	401	401
+409	409	409
+484	484	484
+66	66	66
+86	86	86
+98	98	98
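
For readers skimming the diff: groupby10.q exercises Hive's multi-insert form, in which a single FROM clause feeds several INSERT OVERWRITE branches so the source table is scanned only once. A minimal sketch of the pattern under test on an illustrative table (the names src_table, agg_counts, agg_sums, k, and v are placeholders, not taken from the qfile):

    -- one scan of src_table feeds both aggregation branches
    FROM src_table t
    INSERT OVERWRITE TABLE agg_counts
      SELECT t.k, count(t.v), count(DISTINCT t.v)
      GROUP BY t.k
    INSERT OVERWRITE TABLE agg_sums
      SELECT t.k, sum(t.v), sum(DISTINCT t.v)
      GROUP BY t.k;

In the Spark plans above this shape is visible as one TableScan feeding a Forward operator in Reducer 2, which fans out to two Group By branches (Reducers 3 and 4), one per destination table.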

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby11.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby11.q.out?rev=1634122&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby11.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby11.q.out Fri Oct 24 19:19:08 2014
@@ -0,0 +1,828 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE dest1(key STRING, val1 INT, val2 INT) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE dest1(key STRING, val1 INT, val2 INT) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest1
+PREHOOK: query: CREATE TABLE dest2(key STRING, val1 INT, val2 INT) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest2
+POSTHOOK: query: CREATE TABLE dest2(key STRING, val1 INT, val2 INT) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest2
+PREHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 partition(ds='111')
+  SELECT src.value, count(src.key), count(distinct src.key) GROUP BY src.value
+INSERT OVERWRITE TABLE dest2  partition(ds='111')
+  SELECT substr(src.value, 5), count(src.key), count(distinct src.key) GROUP BY substr(src.value, 5)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 partition(ds='111')
+  SELECT src.value, count(src.key), count(distinct src.key) GROUP BY src.value
+INSERT OVERWRITE TABLE dest2  partition(ds='111')
+  SELECT substr(src.value, 5), count(src.key), count(distinct src.key) GROUP BY substr(src.value, 5)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-3 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-3
+  Stage-4 depends on stages: Stage-0
+  Stage-1 depends on stages: Stage-3
+  Stage-5 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP SORT, 1)
+        Reducer 3 <- Reducer 2 (GROUP, 1)
+        Reducer 4 <- Reducer 2 (GROUP, 1)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: key (type: string)
+                    sort order: +
+                    Map-reduce partition columns: key (type: string)
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: value (type: string), substr(value, 5) (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Forward
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: count(KEY._col0), count(DISTINCT KEY._col0)
+                  keys: VALUE._col0 (type: string)
+                  mode: hash
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: string)
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col1 (type: bigint), _col2 (type: bigint)
+                Group By Operator
+                  aggregations: count(KEY._col0), count(DISTINCT KEY._col0)
+                  keys: VALUE._col1 (type: string)
+                  mode: hash
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: string)
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col1 (type: bigint), _col2 (type: bigint)
+        Reducer 3 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0), count(VALUE._col1)
+                keys: KEY._col0 (type: string)
+                mode: final
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: string), UDFToInteger(_col1) (type: int), UDFToInteger(_col2) (type: int)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.dest1
+        Reducer 4 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0), count(VALUE._col1)
+                keys: KEY._col0 (type: string)
+                mode: final
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: string), UDFToInteger(_col1) (type: int), UDFToInteger(_col2) (type: int)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.dest2
+
+  Stage: Stage-3
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 111
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1
+
+  Stage: Stage-4
+    Stats-Aggr Operator
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          partition:
+            ds 111
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest2
+
+  Stage: Stage-5
+    Stats-Aggr Operator
+
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1 partition(ds='111')
+  SELECT src.value, count(src.key), count(distinct src.key) GROUP BY src.value
+INSERT OVERWRITE TABLE dest2  partition(ds='111')
+  SELECT substr(src.value, 5), count(src.key), count(distinct src.key) GROUP BY substr(src.value, 5)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1@ds=111
+PREHOOK: Output: default@dest2@ds=111
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1 partition(ds='111')
+  SELECT src.value, count(src.key), count(distinct src.key) GROUP BY src.value
+INSERT OVERWRITE TABLE dest2  partition(ds='111')
+  SELECT substr(src.value, 5), count(src.key), count(distinct src.key) GROUP BY substr(src.value, 5)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1@ds=111
+POSTHOOK: Output: default@dest2@ds=111
+POSTHOOK: Lineage: dest1 PARTITION(ds=111).key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1 PARTITION(ds=111).val1 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1 PARTITION(ds=111).val2 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2 PARTITION(ds=111).key EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2 PARTITION(ds=111).val1 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2 PARTITION(ds=111).val2 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: SELECT * from dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Input: default@dest1@ds=111
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * from dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Input: default@dest1@ds=111
+#### A masked pattern was here ####
+val_0	3	1	111
+val_10	1	1	111
+val_100	2	1	111
+val_103	2	1	111
+val_104	2	1	111
+val_105	1	1	111
+val_11	1	1	111
+val_111	1	1	111
+val_113	2	1	111
+val_114	1	1	111
+val_116	1	1	111
+val_118	2	1	111
+val_119	3	1	111
+val_12	2	1	111
+val_120	2	1	111
+val_125	2	1	111
+val_126	1	1	111
+val_128	3	1	111
+val_129	2	1	111
+val_131	1	1	111
+val_133	1	1	111
+val_134	2	1	111
+val_136	1	1	111
+val_137	2	1	111
+val_138	4	1	111
+val_143	1	1	111
+val_145	1	1	111
+val_146	2	1	111
+val_149	2	1	111
+val_15	2	1	111
+val_150	1	1	111
+val_152	2	1	111
+val_153	1	1	111
+val_155	1	1	111
+val_156	1	1	111
+val_157	1	1	111
+val_158	1	1	111
+val_160	1	1	111
+val_162	1	1	111
+val_163	1	1	111
+val_164	2	1	111
+val_165	2	1	111
+val_166	1	1	111
+val_167	3	1	111
+val_168	1	1	111
+val_169	4	1	111
+val_17	1	1	111
+val_170	1	1	111
+val_172	2	1	111
+val_174	2	1	111
+val_175	2	1	111
+val_176	2	1	111
+val_177	1	1	111
+val_178	1	1	111
+val_179	2	1	111
+val_18	2	1	111
+val_180	1	1	111
+val_181	1	1	111
+val_183	1	1	111
+val_186	1	1	111
+val_187	3	1	111
+val_189	1	1	111
+val_19	1	1	111
+val_190	1	1	111
+val_191	2	1	111
+val_192	1	1	111
+val_193	3	1	111
+val_194	1	1	111
+val_195	2	1	111
+val_196	1	1	111
+val_197	2	1	111
+val_199	3	1	111
+val_2	1	1	111
+val_20	1	1	111
+val_200	2	1	111
+val_201	1	1	111
+val_202	1	1	111
+val_203	2	1	111
+val_205	2	1	111
+val_207	2	1	111
+val_208	3	1	111
+val_209	2	1	111
+val_213	2	1	111
+val_214	1	1	111
+val_216	2	1	111
+val_217	2	1	111
+val_218	1	1	111
+val_219	2	1	111
+val_221	2	1	111
+val_222	1	1	111
+val_223	2	1	111
+val_224	2	1	111
+val_226	1	1	111
+val_228	1	1	111
+val_229	2	1	111
+val_230	5	1	111
+val_233	2	1	111
+val_235	1	1	111
+val_237	2	1	111
+val_238	2	1	111
+val_239	2	1	111
+val_24	2	1	111
+val_241	1	1	111
+val_242	2	1	111
+val_244	1	1	111
+val_247	1	1	111
+val_248	1	1	111
+val_249	1	1	111
+val_252	1	1	111
+val_255	2	1	111
+val_256	2	1	111
+val_257	1	1	111
+val_258	1	1	111
+val_26	2	1	111
+val_260	1	1	111
+val_262	1	1	111
+val_263	1	1	111
+val_265	2	1	111
+val_266	1	1	111
+val_27	1	1	111
+val_272	2	1	111
+val_273	3	1	111
+val_274	1	1	111
+val_275	1	1	111
+val_277	4	1	111
+val_278	2	1	111
+val_28	1	1	111
+val_280	2	1	111
+val_281	2	1	111
+val_282	2	1	111
+val_283	1	1	111
+val_284	1	1	111
+val_285	1	1	111
+val_286	1	1	111
+val_287	1	1	111
+val_288	2	1	111
+val_289	1	1	111
+val_291	1	1	111
+val_292	1	1	111
+val_296	1	1	111
+val_298	3	1	111
+val_30	1	1	111
+val_302	1	1	111
+val_305	1	1	111
+val_306	1	1	111
+val_307	2	1	111
+val_308	1	1	111
+val_309	2	1	111
+val_310	1	1	111
+val_311	3	1	111
+val_315	1	1	111
+val_316	3	1	111
+val_317	2	1	111
+val_318	3	1	111
+val_321	2	1	111
+val_322	2	1	111
+val_323	1	1	111
+val_325	2	1	111
+val_327	3	1	111
+val_33	1	1	111
+val_331	2	1	111
+val_332	1	1	111
+val_333	2	1	111
+val_335	1	1	111
+val_336	1	1	111
+val_338	1	1	111
+val_339	1	1	111
+val_34	1	1	111
+val_341	1	1	111
+val_342	2	1	111
+val_344	2	1	111
+val_345	1	1	111
+val_348	5	1	111
+val_35	3	1	111
+val_351	1	1	111
+val_353	2	1	111
+val_356	1	1	111
+val_360	1	1	111
+val_362	1	1	111
+val_364	1	1	111
+val_365	1	1	111
+val_366	1	1	111
+val_367	2	1	111
+val_368	1	1	111
+val_369	3	1	111
+val_37	2	1	111
+val_373	1	1	111
+val_374	1	1	111
+val_375	1	1	111
+val_377	1	1	111
+val_378	1	1	111
+val_379	1	1	111
+val_382	2	1	111
+val_384	3	1	111
+val_386	1	1	111
+val_389	1	1	111
+val_392	1	1	111
+val_393	1	1	111
+val_394	1	1	111
+val_395	2	1	111
+val_396	3	1	111
+val_397	2	1	111
+val_399	2	1	111
+val_4	1	1	111
+val_400	1	1	111
+val_401	5	1	111
+val_402	1	1	111
+val_403	3	1	111
+val_404	2	1	111
+val_406	4	1	111
+val_407	1	1	111
+val_409	3	1	111
+val_41	1	1	111
+val_411	1	1	111
+val_413	2	1	111
+val_414	2	1	111
+val_417	3	1	111
+val_418	1	1	111
+val_419	1	1	111
+val_42	2	1	111
+val_421	1	1	111
+val_424	2	1	111
+val_427	1	1	111
+val_429	2	1	111
+val_43	1	1	111
+val_430	3	1	111
+val_431	3	1	111
+val_432	1	1	111
+val_435	1	1	111
+val_436	1	1	111
+val_437	1	1	111
+val_438	3	1	111
+val_439	2	1	111
+val_44	1	1	111
+val_443	1	1	111
+val_444	1	1	111
+val_446	1	1	111
+val_448	1	1	111
+val_449	1	1	111
+val_452	1	1	111
+val_453	1	1	111
+val_454	3	1	111
+val_455	1	1	111
+val_457	1	1	111
+val_458	2	1	111
+val_459	2	1	111
+val_460	1	1	111
+val_462	2	1	111
+val_463	2	1	111
+val_466	3	1	111
+val_467	1	1	111
+val_468	4	1	111
+val_469	5	1	111
+val_47	1	1	111
+val_470	1	1	111
+val_472	1	1	111
+val_475	1	1	111
+val_477	1	1	111
+val_478	2	1	111
+val_479	1	1	111
+val_480	3	1	111
+val_481	1	1	111
+val_482	1	1	111
+val_483	1	1	111
+val_484	1	1	111
+val_485	1	1	111
+val_487	1	1	111
+val_489	4	1	111
+val_490	1	1	111
+val_491	1	1	111
+val_492	2	1	111
+val_493	1	1	111
+val_494	1	1	111
+val_495	1	1	111
+val_496	1	1	111
+val_497	1	1	111
+val_498	3	1	111
+val_5	3	1	111
+val_51	2	1	111
+val_53	1	1	111
+val_54	1	1	111
+val_57	1	1	111
+val_58	2	1	111
+val_64	1	1	111
+val_65	1	1	111
+val_66	1	1	111
+val_67	2	1	111
+val_69	1	1	111
+val_70	3	1	111
+val_72	2	1	111
+val_74	1	1	111
+val_76	2	1	111
+val_77	1	1	111
+val_78	1	1	111
+val_8	1	1	111
+val_80	1	1	111
+val_82	1	1	111
+val_83	2	1	111
+val_84	2	1	111
+val_85	1	1	111
+val_86	1	1	111
+val_87	1	1	111
+val_9	1	1	111
+val_90	3	1	111
+val_92	1	1	111
+val_95	2	1	111
+val_96	1	1	111
+val_97	2	1	111
+val_98	2	1	111
+PREHOOK: query: SELECT * from dest2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest2
+PREHOOK: Input: default@dest2@ds=111
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * from dest2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest2
+POSTHOOK: Input: default@dest2@ds=111
+#### A masked pattern was here ####
+0	3	1	111
+10	1	1	111
+100	2	1	111
+103	2	1	111
+104	2	1	111
+105	1	1	111
+11	1	1	111
+111	1	1	111
+113	2	1	111
+114	1	1	111
+116	1	1	111
+118	2	1	111
+119	3	1	111
+12	2	1	111
+120	2	1	111
+125	2	1	111
+126	1	1	111
+128	3	1	111
+129	2	1	111
+131	1	1	111
+133	1	1	111
+134	2	1	111
+136	1	1	111
+137	2	1	111
+138	4	1	111
+143	1	1	111
+145	1	1	111
+146	2	1	111
+149	2	1	111
+15	2	1	111
+150	1	1	111
+152	2	1	111
+153	1	1	111
+155	1	1	111
+156	1	1	111
+157	1	1	111
+158	1	1	111
+160	1	1	111
+162	1	1	111
+163	1	1	111
+164	2	1	111
+165	2	1	111
+166	1	1	111
+167	3	1	111
+168	1	1	111
+169	4	1	111
+17	1	1	111
+170	1	1	111
+172	2	1	111
+174	2	1	111
+175	2	1	111
+176	2	1	111
+177	1	1	111
+178	1	1	111
+179	2	1	111
+18	2	1	111
+180	1	1	111
+181	1	1	111
+183	1	1	111
+186	1	1	111
+187	3	1	111
+189	1	1	111
+19	1	1	111
+190	1	1	111
+191	2	1	111
+192	1	1	111
+193	3	1	111
+194	1	1	111
+195	2	1	111
+196	1	1	111
+197	2	1	111
+199	3	1	111
+2	1	1	111
+20	1	1	111
+200	2	1	111
+201	1	1	111
+202	1	1	111
+203	2	1	111
+205	2	1	111
+207	2	1	111
+208	3	1	111
+209	2	1	111
+213	2	1	111
+214	1	1	111
+216	2	1	111
+217	2	1	111
+218	1	1	111
+219	2	1	111
+221	2	1	111
+222	1	1	111
+223	2	1	111
+224	2	1	111
+226	1	1	111
+228	1	1	111
+229	2	1	111
+230	5	1	111
+233	2	1	111
+235	1	1	111
+237	2	1	111
+238	2	1	111
+239	2	1	111
+24	2	1	111
+241	1	1	111
+242	2	1	111
+244	1	1	111
+247	1	1	111
+248	1	1	111
+249	1	1	111
+252	1	1	111
+255	2	1	111
+256	2	1	111
+257	1	1	111
+258	1	1	111
+26	2	1	111
+260	1	1	111
+262	1	1	111
+263	1	1	111
+265	2	1	111
+266	1	1	111
+27	1	1	111
+272	2	1	111
+273	3	1	111
+274	1	1	111
+275	1	1	111
+277	4	1	111
+278	2	1	111
+28	1	1	111
+280	2	1	111
+281	2	1	111
+282	2	1	111
+283	1	1	111
+284	1	1	111
+285	1	1	111
+286	1	1	111
+287	1	1	111
+288	2	1	111
+289	1	1	111
+291	1	1	111
+292	1	1	111
+296	1	1	111
+298	3	1	111
+30	1	1	111
+302	1	1	111
+305	1	1	111
+306	1	1	111
+307	2	1	111
+308	1	1	111
+309	2	1	111
+310	1	1	111
+311	3	1	111
+315	1	1	111
+316	3	1	111
+317	2	1	111
+318	3	1	111
+321	2	1	111
+322	2	1	111
+323	1	1	111
+325	2	1	111
+327	3	1	111
+33	1	1	111
+331	2	1	111
+332	1	1	111
+333	2	1	111
+335	1	1	111
+336	1	1	111
+338	1	1	111
+339	1	1	111
+34	1	1	111
+341	1	1	111
+342	2	1	111
+344	2	1	111
+345	1	1	111
+348	5	1	111
+35	3	1	111
+351	1	1	111
+353	2	1	111
+356	1	1	111
+360	1	1	111
+362	1	1	111
+364	1	1	111
+365	1	1	111
+366	1	1	111
+367	2	1	111
+368	1	1	111
+369	3	1	111
+37	2	1	111
+373	1	1	111
+374	1	1	111
+375	1	1	111
+377	1	1	111
+378	1	1	111
+379	1	1	111
+382	2	1	111
+384	3	1	111
+386	1	1	111
+389	1	1	111
+392	1	1	111
+393	1	1	111
+394	1	1	111
+395	2	1	111
+396	3	1	111
+397	2	1	111
+399	2	1	111
+4	1	1	111
+400	1	1	111
+401	5	1	111
+402	1	1	111
+403	3	1	111
+404	2	1	111
+406	4	1	111
+407	1	1	111
+409	3	1	111
+41	1	1	111
+411	1	1	111
+413	2	1	111
+414	2	1	111
+417	3	1	111
+418	1	1	111
+419	1	1	111
+42	2	1	111
+421	1	1	111
+424	2	1	111
+427	1	1	111
+429	2	1	111
+43	1	1	111
+430	3	1	111
+431	3	1	111
+432	1	1	111
+435	1	1	111
+436	1	1	111
+437	1	1	111
+438	3	1	111
+439	2	1	111
+44	1	1	111
+443	1	1	111
+444	1	1	111
+446	1	1	111
+448	1	1	111
+449	1	1	111
+452	1	1	111
+453	1	1	111
+454	3	1	111
+455	1	1	111
+457	1	1	111
+458	2	1	111
+459	2	1	111
+460	1	1	111
+462	2	1	111
+463	2	1	111
+466	3	1	111
+467	1	1	111
+468	4	1	111
+469	5	1	111
+47	1	1	111
+470	1	1	111
+472	1	1	111
+475	1	1	111
+477	1	1	111
+478	2	1	111
+479	1	1	111
+480	3	1	111
+481	1	1	111
+482	1	1	111
+483	1	1	111
+484	1	1	111
+485	1	1	111
+487	1	1	111
+489	4	1	111
+490	1	1	111
+491	1	1	111
+492	2	1	111
+493	1	1	111
+494	1	1	111
+495	1	1	111
+496	1	1	111
+497	1	1	111
+498	3	1	111
+5	3	1	111
+51	2	1	111
+53	1	1	111
+54	1	1	111
+57	1	1	111
+58	2	1	111
+64	1	1	111
+65	1	1	111
+66	1	1	111
+67	2	1	111
+69	1	1	111
+70	3	1	111
+72	2	1	111
+74	1	1	111
+76	2	1	111
+77	1	1	111
+78	1	1	111
+8	1	1	111
+80	1	1	111
+82	1	1	111
+83	2	1	111
+84	2	1	111
+85	1	1	111
+86	1	1	111
+87	1	1	111
+9	1	1	111
+90	3	1	111
+92	1	1	111
+95	2	1	111
+96	1	1	111
+97	2	1	111
+98	2	1	111
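
groupby11.q covers the same multi-insert shape, but with partitioned destinations: each branch writes into a static partition. A minimal sketch of that variant (table and column names are again placeholders):

    -- both branches target the static partition ds='111'
    FROM src_table t
    INSERT OVERWRITE TABLE part_dest1 PARTITION (ds='111')
      SELECT t.v, count(t.k), count(DISTINCT t.k)
      GROUP BY t.v
    INSERT OVERWRITE TABLE part_dest2 PARTITION (ds='111')
      SELECT substr(t.v, 5), count(t.k), count(DISTINCT t.k)
      GROUP BY substr(t.v, 5);

The static partition surfaces in the Move Operator stages above (partition: ds 111) and as the trailing 111 column in the SELECT * results, since Hive appends partition columns to each returned row.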

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby3_map_skew.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby3_map_skew.q.out?rev=1634122&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby3_map_skew.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby3_map_skew.q.out Fri Oct 24 19:19:08 2014
@@ -0,0 +1,163 @@
+PREHOOK: query: CREATE TABLE dest1(c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 DOUBLE, c6 DOUBLE, c7 DOUBLE, c8 DOUBLE, c9 DOUBLE) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1
+POSTHOOK: query: CREATE TABLE dest1(c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 DOUBLE, c6 DOUBLE, c7 DOUBLE, c8 DOUBLE, c9 DOUBLE) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT
+  sum(substr(src.value,5)),
+  avg(substr(src.value,5)),
+  avg(DISTINCT substr(src.value,5)),
+  max(substr(src.value,5)),
+  min(substr(src.value,5)),
+  std(substr(src.value,5)),
+  stddev_samp(substr(src.value,5)),
+  variance(substr(src.value,5)),
+  var_samp(substr(src.value,5))
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT
+  sum(substr(src.value,5)),
+  avg(substr(src.value,5)),
+  avg(DISTINCT substr(src.value,5)),
+  max(substr(src.value,5)),
+  min(substr(src.value,5)),
+  std(substr(src.value,5)),
+  stddev_samp(substr(src.value,5)),
+  variance(substr(src.value,5)),
+  var_samp(substr(src.value,5))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP SORT, 31)
+        Reducer 3 <- Reducer 2 (GROUP, 1)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: value (type: string)
+                    outputColumnNames: value
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: sum(substr(value, 5)), avg(substr(value, 5)), avg(DISTINCT substr(value, 5)), max(substr(value, 5)), min(substr(value, 5)), std(substr(value, 5)), stddev_samp(substr(value, 5)), variance(substr(value, 5)), var_samp(substr(value, 5))
+                      keys: substr(value, 5) (type: string)
+                      mode: hash
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col1 (type: double), _col2 (type: struct<count:bigint,sum:double,input:string>), _col4 (type: string), _col5 (type: string), _col6 (type: struct<count:bigint,sum:double,variance:double>), _col7 (type: struct<count:bigint,sum:double,variance:double>), _col8 (type: struct<count:bigint,sum:double,variance:double>), _col9 (type: struct<count:bigint,sum:double,variance:double>)
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0), avg(VALUE._col1), avg(DISTINCT KEY._col0:0._col0), max(VALUE._col3), min(VALUE._col4), std(VALUE._col5), stddev_samp(VALUE._col6), variance(VALUE._col7), var_samp(VALUE._col8)
+                mode: partials
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
+                Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: double), _col1 (type: struct<count:bigint,sum:double,input:string>), _col2 (type: struct<count:bigint,sum:double,input:string>), _col3 (type: string), _col4 (type: string), _col5 (type: struct<count:bigint,sum:double,variance:double>), _col6 (type: struct<count:bigint,sum:double,variance:double>), _col7 (type: struct<count:bigint,sum:double,variance:double>), _col8 (type: struct<count:bigint,sum:double,variance:double>)
+        Reducer 3 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2), max(VALUE._col3), min(VALUE._col4), std(VALUE._col5), stddev_samp(VALUE._col6), variance(VALUE._col7), var_samp(VALUE._col8)
+                mode: final
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
+                Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: double), _col1 (type: double), _col2 (type: double), UDFToDouble(_col3) (type: double), UDFToDouble(_col4) (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: double)
+                  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
+                  Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.dest1
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT
+  sum(substr(src.value,5)),
+  avg(substr(src.value,5)),
+  avg(DISTINCT substr(src.value,5)),
+  max(substr(src.value,5)),
+  min(substr(src.value,5)),
+  std(substr(src.value,5)),
+  stddev_samp(substr(src.value,5)),
+  variance(substr(src.value,5)),
+  var_samp(substr(src.value,5))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT
+  sum(substr(src.value,5)),
+  avg(substr(src.value,5)),
+  avg(DISTINCT substr(src.value,5)),
+  max(substr(src.value,5)),
+  min(substr(src.value,5)),
+  std(substr(src.value,5)),
+  stddev_samp(substr(src.value,5)),
+  variance(substr(src.value,5)),
+  var_samp(substr(src.value,5))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c1 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c3 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c4 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c5 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c6 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c7 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c8 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c9 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT c1, c2, c3, c4, c5, c6, c7, ROUND(c8, 5), ROUND(c9, 5) FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT c1, c2, c3, c4, c5, c6, c7, ROUND(c8, 5), ROUND(c9, 5) FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+130091.0	260.182	256.10355987055016	98.0	0.0	142.92680950752379	143.06995106518903	20428.07288	20469.0109

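[Editor's note] Two details in the dest1 result row above are easy to misread. First, max and min are evaluated over the raw substr(value, 5) strings and only cast to double on output (the plan carries _col3/_col4 as string and applies UDFToDouble in the final Select Operator), which is why max is 98.0 rather than 498.0: lexicographically, "98" beats "498". Second, std/variance are the population forms (divide by n) while stddev_samp/var_samp are the sample forms (divide by n - 1); over the 500-row src table the two variances differ by exactly n/(n - 1), e.g. 20428.07288 * 500/499 = 20469.0109, matching the last two columns. A minimal ad-hoc check of that relationship (hypothetical query, not part of this commit):

    -- var_samp should equal variance scaled by n/(n-1); with src, n = 500
    SELECT variance(substr(value, 5)) * 500 / 499 AS scaled_pop_var,
           var_samp(substr(value, 5))             AS sample_var
    FROM src;
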
Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby7.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby7.q.out?rev=1634122&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby7.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby7.q.out Fri Oct 24 19:19:08 2014
@@ -0,0 +1,674 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE DEST1(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DEST1
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE DEST1(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DEST1
+PREHOOK: query: CREATE TABLE DEST2(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DEST2
+POSTHOOK: query: CREATE TABLE DEST2(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DEST2
+PREHOOK: query: FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+PREHOOK: Output: default@dest2
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT DEST1.* FROM DEST1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT DEST1.* FROM DEST1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+0	0.0
+10	10.0
+100	200.0
+103	206.0
+104	208.0
+105	105.0
+11	11.0
+111	111.0
+113	226.0
+114	114.0
+116	116.0
+118	236.0
+119	357.0
+12	24.0
+120	240.0
+125	250.0
+126	126.0
+128	384.0
+129	258.0
+131	131.0
+133	133.0
+134	268.0
+136	136.0
+137	274.0
+138	552.0
+143	143.0
+145	145.0
+146	292.0
+149	298.0
+15	30.0
+150	150.0
+152	304.0
+153	153.0
+155	155.0
+156	156.0
+157	157.0
+158	158.0
+160	160.0
+162	162.0
+163	163.0
+164	328.0
+165	330.0
+166	166.0
+167	501.0
+168	168.0
+169	676.0
+17	17.0
+170	170.0
+172	344.0
+174	348.0
+175	350.0
+176	352.0
+177	177.0
+178	178.0
+179	358.0
+18	36.0
+180	180.0
+181	181.0
+183	183.0
+186	186.0
+187	561.0
+189	189.0
+19	19.0
+190	190.0
+191	382.0
+192	192.0
+193	579.0
+194	194.0
+195	390.0
+196	196.0
+197	394.0
+199	597.0
+2	2.0
+20	20.0
+200	400.0
+201	201.0
+202	202.0
+203	406.0
+205	410.0
+207	414.0
+208	624.0
+209	418.0
+213	426.0
+214	214.0
+216	432.0
+217	434.0
+218	218.0
+219	438.0
+221	442.0
+222	222.0
+223	446.0
+224	448.0
+226	226.0
+228	228.0
+229	458.0
+230	1150.0
+233	466.0
+235	235.0
+237	474.0
+238	476.0
+239	478.0
+24	48.0
+241	241.0
+242	484.0
+244	244.0
+247	247.0
+248	248.0
+249	249.0
+252	252.0
+255	510.0
+256	512.0
+257	257.0
+258	258.0
+26	52.0
+260	260.0
+262	262.0
+263	263.0
+265	530.0
+266	266.0
+27	27.0
+272	544.0
+273	819.0
+274	274.0
+275	275.0
+277	1108.0
+278	556.0
+28	28.0
+280	560.0
+281	562.0
+282	564.0
+283	283.0
+284	284.0
+285	285.0
+286	286.0
+287	287.0
+288	576.0
+289	289.0
+291	291.0
+292	292.0
+296	296.0
+298	894.0
+30	30.0
+302	302.0
+305	305.0
+306	306.0
+307	614.0
+308	308.0
+309	618.0
+310	310.0
+311	933.0
+315	315.0
+316	948.0
+317	634.0
+318	954.0
+321	642.0
+322	644.0
+323	323.0
+325	650.0
+327	981.0
+33	33.0
+331	662.0
+332	332.0
+333	666.0
+335	335.0
+336	336.0
+338	338.0
+339	339.0
+34	34.0
+341	341.0
+342	684.0
+344	688.0
+345	345.0
+348	1740.0
+35	105.0
+351	351.0
+353	706.0
+356	356.0
+360	360.0
+362	362.0
+364	364.0
+365	365.0
+366	366.0
+367	734.0
+368	368.0
+369	1107.0
+37	74.0
+373	373.0
+374	374.0
+375	375.0
+377	377.0
+378	378.0
+379	379.0
+382	764.0
+384	1152.0
+386	386.0
+389	389.0
+392	392.0
+393	393.0
+394	394.0
+395	790.0
+396	1188.0
+397	794.0
+399	798.0
+4	4.0
+400	400.0
+401	2005.0
+402	402.0
+403	1209.0
+404	808.0
+406	1624.0
+407	407.0
+409	1227.0
+41	41.0
+411	411.0
+413	826.0
+414	828.0
+417	1251.0
+418	418.0
+419	419.0
+42	84.0
+421	421.0
+424	848.0
+427	427.0
+429	858.0
+43	43.0
+430	1290.0
+431	1293.0
+432	432.0
+435	435.0
+436	436.0
+437	437.0
+438	1314.0
+439	878.0
+44	44.0
+443	443.0
+444	444.0
+446	446.0
+448	448.0
+449	449.0
+452	452.0
+453	453.0
+454	1362.0
+455	455.0
+457	457.0
+458	916.0
+459	918.0
+460	460.0
+462	924.0
+463	926.0
+466	1398.0
+467	467.0
+468	1872.0
+469	2345.0
+47	47.0
+470	470.0
+472	472.0
+475	475.0
+477	477.0
+478	956.0
+479	479.0
+480	1440.0
+481	481.0
+482	482.0
+483	483.0
+484	484.0
+485	485.0
+487	487.0
+489	1956.0
+490	490.0
+491	491.0
+492	984.0
+493	493.0
+494	494.0
+495	495.0
+496	496.0
+497	497.0
+498	1494.0
+5	15.0
+51	102.0
+53	53.0
+54	54.0
+57	57.0
+58	116.0
+64	64.0
+65	65.0
+66	66.0
+67	134.0
+69	69.0
+70	210.0
+72	144.0
+74	74.0
+76	152.0
+77	77.0
+78	78.0
+8	8.0
+80	80.0
+82	82.0
+83	166.0
+84	168.0
+85	85.0
+86	86.0
+87	87.0
+9	9.0
+90	270.0
+92	92.0
+95	190.0
+96	96.0
+97	194.0
+98	196.0
+PREHOOK: query: SELECT DEST2.* FROM DEST2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT DEST2.* FROM DEST2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest2
+#### A masked pattern was here ####
+0	0.0
+10	10.0
+100	200.0
+103	206.0
+104	208.0
+105	105.0
+11	11.0
+111	111.0
+113	226.0
+114	114.0
+116	116.0
+118	236.0
+119	357.0
+12	24.0
+120	240.0
+125	250.0
+126	126.0
+128	384.0
+129	258.0
+131	131.0
+133	133.0
+134	268.0
+136	136.0
+137	274.0
+138	552.0
+143	143.0
+145	145.0
+146	292.0
+149	298.0
+15	30.0
+150	150.0
+152	304.0
+153	153.0
+155	155.0
+156	156.0
+157	157.0
+158	158.0
+160	160.0
+162	162.0
+163	163.0
+164	328.0
+165	330.0
+166	166.0
+167	501.0
+168	168.0
+169	676.0
+17	17.0
+170	170.0
+172	344.0
+174	348.0
+175	350.0
+176	352.0
+177	177.0
+178	178.0
+179	358.0
+18	36.0
+180	180.0
+181	181.0
+183	183.0
+186	186.0
+187	561.0
+189	189.0
+19	19.0
+190	190.0
+191	382.0
+192	192.0
+193	579.0
+194	194.0
+195	390.0
+196	196.0
+197	394.0
+199	597.0
+2	2.0
+20	20.0
+200	400.0
+201	201.0
+202	202.0
+203	406.0
+205	410.0
+207	414.0
+208	624.0
+209	418.0
+213	426.0
+214	214.0
+216	432.0
+217	434.0
+218	218.0
+219	438.0
+221	442.0
+222	222.0
+223	446.0
+224	448.0
+226	226.0
+228	228.0
+229	458.0
+230	1150.0
+233	466.0
+235	235.0
+237	474.0
+238	476.0
+239	478.0
+24	48.0
+241	241.0
+242	484.0
+244	244.0
+247	247.0
+248	248.0
+249	249.0
+252	252.0
+255	510.0
+256	512.0
+257	257.0
+258	258.0
+26	52.0
+260	260.0
+262	262.0
+263	263.0
+265	530.0
+266	266.0
+27	27.0
+272	544.0
+273	819.0
+274	274.0
+275	275.0
+277	1108.0
+278	556.0
+28	28.0
+280	560.0
+281	562.0
+282	564.0
+283	283.0
+284	284.0
+285	285.0
+286	286.0
+287	287.0
+288	576.0
+289	289.0
+291	291.0
+292	292.0
+296	296.0
+298	894.0
+30	30.0
+302	302.0
+305	305.0
+306	306.0
+307	614.0
+308	308.0
+309	618.0
+310	310.0
+311	933.0
+315	315.0
+316	948.0
+317	634.0
+318	954.0
+321	642.0
+322	644.0
+323	323.0
+325	650.0
+327	981.0
+33	33.0
+331	662.0
+332	332.0
+333	666.0
+335	335.0
+336	336.0
+338	338.0
+339	339.0
+34	34.0
+341	341.0
+342	684.0
+344	688.0
+345	345.0
+348	1740.0
+35	105.0
+351	351.0
+353	706.0
+356	356.0
+360	360.0
+362	362.0
+364	364.0
+365	365.0
+366	366.0
+367	734.0
+368	368.0
+369	1107.0
+37	74.0
+373	373.0
+374	374.0
+375	375.0
+377	377.0
+378	378.0
+379	379.0
+382	764.0
+384	1152.0
+386	386.0
+389	389.0
+392	392.0
+393	393.0
+394	394.0
+395	790.0
+396	1188.0
+397	794.0
+399	798.0
+4	4.0
+400	400.0
+401	2005.0
+402	402.0
+403	1209.0
+404	808.0
+406	1624.0
+407	407.0
+409	1227.0
+41	41.0
+411	411.0
+413	826.0
+414	828.0
+417	1251.0
+418	418.0
+419	419.0
+42	84.0
+421	421.0
+424	848.0
+427	427.0
+429	858.0
+43	43.0
+430	1290.0
+431	1293.0
+432	432.0
+435	435.0
+436	436.0
+437	437.0
+438	1314.0
+439	878.0
+44	44.0
+443	443.0
+444	444.0
+446	446.0
+448	448.0
+449	449.0
+452	452.0
+453	453.0
+454	1362.0
+455	455.0
+457	457.0
+458	916.0
+459	918.0
+460	460.0
+462	924.0
+463	926.0
+466	1398.0
+467	467.0
+468	1872.0
+469	2345.0
+47	47.0
+470	470.0
+472	472.0
+475	475.0
+477	477.0
+478	956.0
+479	479.0
+480	1440.0
+481	481.0
+482	482.0
+483	483.0
+484	484.0
+485	485.0
+487	487.0
+489	1956.0
+490	490.0
+491	491.0
+492	984.0
+493	493.0
+494	494.0
+495	495.0
+496	496.0
+497	497.0
+498	1494.0
+5	15.0
+51	102.0
+53	53.0
+54	54.0
+57	57.0
+58	116.0
+64	64.0
+65	65.0
+66	66.0
+67	134.0
+69	69.0
+70	210.0
+72	144.0
+74	74.0
+76	152.0
+77	77.0
+78	78.0
+8	8.0
+80	80.0
+82	82.0
+83	166.0
+84	168.0
+85	85.0
+86	86.0
+87	87.0
+9	9.0
+90	270.0
+92	92.0
+95	190.0
+96	96.0
+97	194.0
+98	196.0

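[Editor's note] The DEST1 and DEST2 listings above are intentionally identical: groupby7 runs the same per-key sum twice in one multi-insert statement, and the point of the test is that both inserts are fed by a single scan of SRC. A quick way to confirm the two tables match (hypothetical verification query, not part of this commit):

    -- any (key, value) pair not appearing exactly twice indicates a mismatch
    SELECT t.key, t.value, count(*) AS cnt
    FROM (
      SELECT key, value FROM dest1
      UNION ALL
      SELECT key, value FROM dest2
    ) t
    GROUP BY t.key, t.value
    HAVING count(*) <> 2;
    -- expected result: no rows
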
Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby7_noskew_multi_single_reducer.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby7_noskew_multi_single_reducer.q.out?rev=1634122&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby7_noskew_multi_single_reducer.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby7_noskew_multi_single_reducer.q.out Fri Oct 24 19:19:08 2014
@@ -0,0 +1,228 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE DEST1(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DEST1
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE DEST1(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DEST1
+PREHOOK: query: CREATE TABLE DEST2(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DEST2
+POSTHOOK: query: CREATE TABLE DEST2(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DEST2
+PREHOOK: query: EXPLAIN
+FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key limit 10
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key limit 10
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key limit 10
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key limit 10
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-3 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-3
+  Stage-4 depends on stages: Stage-0
+  Stage-1 depends on stages: Stage-3
+  Stage-5 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP SORT, 31)
+        Reducer 3 <- Reducer 2 (GROUP, 1)
+        Reducer 4 <- Reducer 2 (GROUP, 1)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: key, value
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: key (type: string)
+                      sort order: +
+                      Map-reduce partition columns: key (type: string)
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: substr(value, 5) (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Forward
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: sum(VALUE._col0)
+                  keys: KEY._col0 (type: string)
+                  mode: complete
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: string), _col1 (type: double)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    Limit
+                      Number of rows: 10
+                      Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: string), _col1 (type: double)
+                Group By Operator
+                  aggregations: sum(VALUE._col0)
+                  keys: KEY._col0 (type: string)
+                  mode: complete
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: string), _col1 (type: double)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    Limit
+                      Number of rows: 10
+                      Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: string), _col1 (type: double)
+        Reducer 3 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: string), VALUE._col1 (type: double)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                Limit
+                  Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: double)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: true
+                      Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.dest1
+        Reducer 4 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: string), VALUE._col1 (type: double)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                Limit
+                  Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: double)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: true
+                      Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.dest2
+
+  Stage: Stage-3
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1
+
+  Stage: Stage-4
+    Stats-Aggr Operator
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest2
+
+  Stage: Stage-5
+    Stats-Aggr Operator
+
+PREHOOK: query: FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key ORDER BY SRC.key limit 10
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key ORDER BY SRC.key limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+PREHOOK: Output: default@dest2
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM SRC
+INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key ORDER BY SRC.key limit 10
+INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key ORDER BY SRC.key limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT DEST1.* FROM DEST1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT DEST1.* FROM DEST1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+0	0.0
+10	10.0
+100	200.0
+103	206.0
+104	208.0
+105	105.0
+11	11.0
+111	111.0
+113	226.0
+114	114.0
+PREHOOK: query: SELECT DEST2.* FROM DEST2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT DEST2.* FROM DEST2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest2
+#### A masked pattern was here ####
+0	0.0
+10	10.0
+100	200.0
+103	206.0
+104	208.0
+105	105.0
+11	11.0
+111	111.0
+113	226.0
+114	114.0
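[Editor's note] The explain plan for groupby7_noskew_multi_single_reducer shows the optimization the test name refers to: because both inserts group on the same key, Map 1 emits each row once, and the Forward operator in Reducer 2 replays the single sorted stream into two complete-mode Group By Operators, one per destination table, so the shared aggregation costs one shuffle. Reducers 3 and 4 then each apply the LIMIT 10 and write their table. Judging by the test name, this plan shape presumably requires skew handling off and the multi-group-by single-reducer optimization on; a sketch of the session settings (assumed from the test name, not shown in this diff):

    -- assumed settings for the single-reducer multi-group-by plan
    set hive.groupby.skewindata=false;
    set hive.multigroupby.singlereducer=true;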