Posted to commits@hive.apache.org by xu...@apache.org on 2014/09/24 05:16:26 UTC

svn commit: r1627210 [9/14] - in /hive/branches/spark: itests/src/test/resources/ ql/src/test/results/clientpositive/spark/

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/multi_insert_gby.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/multi_insert_gby.q.out?rev=1627210&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/multi_insert_gby.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/multi_insert_gby.q.out Wed Sep 24 03:16:25 2014
@@ -0,0 +1,719 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+--HIVE-3699 Multiple insert overwrite into multiple tables query stores same results in all tables
+create table e1 (key string, count int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@e1
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+--HIVE-3699 Multiple insert overwrite into multiple tables query stores same results in all tables
+create table e1 (key string, count int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@e1
+PREHOOK: query: create table e2 (key string, count int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@e2
+POSTHOOK: query: create table e2 (key string, count int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@e2
+PREHOOK: query: explain FROM src
+INSERT OVERWRITE TABLE e1
+    SELECT key, COUNT(*) WHERE key>450 GROUP BY key
+INSERT OVERWRITE TABLE e2
+    SELECT key, COUNT(*) WHERE key>500 GROUP BY key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain FROM src
+INSERT OVERWRITE TABLE e1
+    SELECT key, COUNT(*) WHERE key>450 GROUP BY key
+INSERT OVERWRITE TABLE e2
+    SELECT key, COUNT(*) WHERE key>500 GROUP BY key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-3 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-3
+  Stage-4 depends on stages: Stage-0
+  Stage-1 depends on stages: Stage-3
+  Stage-5 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP SORT)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: ((key > 450) or (key > 500)) (type: boolean)
+                    Statistics: Num rows: 332 Data size: 3527 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string)
+                      outputColumnNames: key
+                      Statistics: Num rows: 332 Data size: 3527 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: key (type: string)
+                        sort order: +
+                        Map-reduce partition columns: key (type: string)
+                        Statistics: Num rows: 332 Data size: 3527 Basic stats: COMPLETE Column stats: NONE
+        Reducer 2 
+            Reduce Operator Tree:
+              Forward
+                Statistics: Num rows: 332 Data size: 3527 Basic stats: COMPLETE Column stats: NONE
+                Filter Operator
+                  predicate: (KEY._col0 > 450) (type: boolean)
+                  Statistics: Num rows: 110 Data size: 1168 Basic stats: COMPLETE Column stats: NONE
+                  Group By Operator
+                    aggregations: count()
+                    keys: KEY._col0 (type: string)
+                    mode: complete
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: _col0 (type: string), UDFToInteger(_col1) (type: int)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: default.e1
+                Filter Operator
+                  predicate: (KEY._col0 > 500) (type: boolean)
+                  Statistics: Num rows: 110 Data size: 1168 Basic stats: COMPLETE Column stats: NONE
+                  Group By Operator
+                    aggregations: count()
+                    keys: KEY._col0 (type: string)
+                    mode: complete
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: _col0 (type: string), UDFToInteger(_col1) (type: int)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: default.e2
+
+  Stage: Stage-3
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.e1
+
+  Stage: Stage-4
+    Stats-Aggr Operator
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.e2
+
+  Stage: Stage-5
+    Stats-Aggr Operator
+
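The plan above shows the HIVE-3699 fix at work: the single map-side Filter Operator applies the union of both predicates, ((key > 450) or (key > 500)), so src is scanned only once, and the Forward operator in Reducer 2 re-applies each branch's own predicate before its Group By, so each target table receives only its own rows. Semantically the multi-insert behaves like two independent statements over the same source; a sketch follows (table names as in the test above; illustrative, not part of the commit):

    INSERT OVERWRITE TABLE e1
        SELECT key, COUNT(*) FROM src WHERE key > 450 GROUP BY key;
    INSERT OVERWRITE TABLE e2
        SELECT key, COUNT(*) FROM src WHERE key > 500 GROUP BY key;

The multi-insert form saves the second scan and the second shuffle of src.
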
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE e1
+    SELECT key, COUNT(*) WHERE key>450 GROUP BY key
+INSERT OVERWRITE TABLE e2
+    SELECT key, COUNT(*) WHERE key>500 GROUP BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@e1
+PREHOOK: Output: default@e2
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM src
+INSERT OVERWRITE TABLE e1
+    SELECT key, COUNT(*) WHERE key>450 GROUP BY key
+INSERT OVERWRITE TABLE e2
+    SELECT key, COUNT(*) WHERE key>500 GROUP BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@e1
+POSTHOOK: Output: default@e2
+POSTHOOK: Lineage: e1.count EXPRESSION [(src)src.null, ]
+POSTHOOK: Lineage: e1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: e2.count EXPRESSION [(src)src.null, ]
+POSTHOOK: Lineage: e2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: select * from e1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@e1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from e1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@e1
+#### A masked pattern was here ####
+452	1
+453	1
+454	3
+455	1
+457	1
+458	2
+459	2
+460	1
+462	2
+463	2
+466	3
+467	1
+468	4
+469	5
+470	1
+472	1
+475	1
+477	1
+478	2
+479	1
+480	3
+481	1
+482	1
+483	1
+484	1
+485	1
+487	1
+489	4
+490	1
+491	1
+492	2
+493	1
+494	1
+495	1
+496	1
+497	1
+498	3
+PREHOOK: query: select * from e2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@e2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from e2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@e2
+#### A masked pattern was here ####
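
e2 comes back empty, which is the expected result: the keys in the standard 500-row src fixture top out at 498 (the largest value in the e1 listing above), so key > 500 matches nothing. Under the HIVE-3699 bug both tables would have received the same rows. The two [Error 30017] lines in the execution block are also part of the recorded baseline; they appear to indicate that counter-based stats aggregation is not yet wired up on the spark branch, so stats collection is skipped without failing the query. A quick sanity check one could run (illustrative, not part of the test):

    SELECT max(cast(key AS double)) FROM src;   -- 498.0 for the standard src fixture
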
+PREHOOK: query: explain FROM src
+INSERT OVERWRITE TABLE e1
+    SELECT key, COUNT(*) WHERE key>450 GROUP BY key
+INSERT OVERWRITE TABLE e2
+    SELECT key, COUNT(*) GROUP BY key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain FROM src
+INSERT OVERWRITE TABLE e1
+    SELECT key, COUNT(*) WHERE key>450 GROUP BY key
+INSERT OVERWRITE TABLE e2
+    SELECT key, COUNT(*) GROUP BY key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-3 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-3
+  Stage-4 depends on stages: Stage-0
+  Stage-1 depends on stages: Stage-3
+  Stage-5 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP SORT)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string)
+                    outputColumnNames: key
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: key (type: string)
+                      sort order: +
+                      Map-reduce partition columns: key (type: string)
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+        Reducer 2 
+            Reduce Operator Tree:
+              Forward
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                Filter Operator
+                  predicate: (KEY._col0 > 450) (type: boolean)
+                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                  Group By Operator
+                    aggregations: count()
+                    keys: KEY._col0 (type: string)
+                    mode: complete
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: _col0 (type: string), UDFToInteger(_col1) (type: int)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: default.e1
+                Group By Operator
+                  aggregations: count()
+                  keys: KEY._col0 (type: string)
+                  mode: complete
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: string), UDFToInteger(_col1) (type: int)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.e2
+
+  Stage: Stage-3
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.e1
+
+  Stage: Stage-4
+    Stats-Aggr Operator
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.e2
+
+  Stage: Stage-5
+    Stats-Aggr Operator
+
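Here the e2 branch has no WHERE clause, so the map side loses its Filter Operator entirely and all 500 rows are shuffled; Reducer 2's Forward then applies key > 450 only on the e1 branch while the e2 branch groups every key. The listings below bear this out: e1 keeps its 37 rows while e2 gets one row per distinct key in src. A rough check (illustrative):

    SELECT count(*) FROM e2;   -- 309, the number of distinct keys in the standard src fixture
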
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE e1
+    SELECT key, COUNT(*) WHERE key>450 GROUP BY key
+INSERT OVERWRITE TABLE e2
+    SELECT key, COUNT(*) GROUP BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@e1
+PREHOOK: Output: default@e2
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM src
+INSERT OVERWRITE TABLE e1
+    SELECT key, COUNT(*) WHERE key>450 GROUP BY key
+INSERT OVERWRITE TABLE e2
+    SELECT key, COUNT(*) GROUP BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@e1
+POSTHOOK: Output: default@e2
+POSTHOOK: Lineage: e1.count EXPRESSION [(src)src.null, ]
+POSTHOOK: Lineage: e1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: e2.count EXPRESSION [(src)src.null, ]
+POSTHOOK: Lineage: e2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: select * from e1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@e1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from e1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@e1
+#### A masked pattern was here ####
+452	1
+453	1
+454	3
+455	1
+457	1
+458	2
+459	2
+460	1
+462	2
+463	2
+466	3
+467	1
+468	4
+469	5
+470	1
+472	1
+475	1
+477	1
+478	2
+479	1
+480	3
+481	1
+482	1
+483	1
+484	1
+485	1
+487	1
+489	4
+490	1
+491	1
+492	2
+493	1
+494	1
+495	1
+496	1
+497	1
+498	3
+PREHOOK: query: select * from e2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@e2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from e2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@e2
+#### A masked pattern was here ####
+0	3
+10	1
+100	2
+103	2
+104	2
+105	1
+11	1
+111	1
+113	2
+114	1
+116	1
+118	2
+119	3
+12	2
+120	2
+125	2
+126	1
+128	3
+129	2
+131	1
+133	1
+134	2
+136	1
+137	2
+138	4
+143	1
+145	1
+146	2
+149	2
+15	2
+150	1
+152	2
+153	1
+155	1
+156	1
+157	1
+158	1
+160	1
+162	1
+163	1
+164	2
+165	2
+166	1
+167	3
+168	1
+169	4
+17	1
+170	1
+172	2
+174	2
+175	2
+176	2
+177	1
+178	1
+179	2
+18	2
+180	1
+181	1
+183	1
+186	1
+187	3
+189	1
+19	1
+190	1
+191	2
+192	1
+193	3
+194	1
+195	2
+196	1
+197	2
+199	3
+2	1
+20	1
+200	2
+201	1
+202	1
+203	2
+205	2
+207	2
+208	3
+209	2
+213	2
+214	1
+216	2
+217	2
+218	1
+219	2
+221	2
+222	1
+223	2
+224	2
+226	1
+228	1
+229	2
+230	5
+233	2
+235	1
+237	2
+238	2
+239	2
+24	2
+241	1
+242	2
+244	1
+247	1
+248	1
+249	1
+252	1
+255	2
+256	2
+257	1
+258	1
+26	2
+260	1
+262	1
+263	1
+265	2
+266	1
+27	1
+272	2
+273	3
+274	1
+275	1
+277	4
+278	2
+28	1
+280	2
+281	2
+282	2
+283	1
+284	1
+285	1
+286	1
+287	1
+288	2
+289	1
+291	1
+292	1
+296	1
+298	3
+30	1
+302	1
+305	1
+306	1
+307	2
+308	1
+309	2
+310	1
+311	3
+315	1
+316	3
+317	2
+318	3
+321	2
+322	2
+323	1
+325	2
+327	3
+33	1
+331	2
+332	1
+333	2
+335	1
+336	1
+338	1
+339	1
+34	1
+341	1
+342	2
+344	2
+345	1
+348	5
+35	3
+351	1
+353	2
+356	1
+360	1
+362	1
+364	1
+365	1
+366	1
+367	2
+368	1
+369	3
+37	2
+373	1
+374	1
+375	1
+377	1
+378	1
+379	1
+382	2
+384	3
+386	1
+389	1
+392	1
+393	1
+394	1
+395	2
+396	3
+397	2
+399	2
+4	1
+400	1
+401	5
+402	1
+403	3
+404	2
+406	4
+407	1
+409	3
+41	1
+411	1
+413	2
+414	2
+417	3
+418	1
+419	1
+42	2
+421	1
+424	2
+427	1
+429	2
+43	1
+430	3
+431	3
+432	1
+435	1
+436	1
+437	1
+438	3
+439	2
+44	1
+443	1
+444	1
+446	1
+448	1
+449	1
+452	1
+453	1
+454	3
+455	1
+457	1
+458	2
+459	2
+460	1
+462	2
+463	2
+466	3
+467	1
+468	4
+469	5
+47	1
+470	1
+472	1
+475	1
+477	1
+478	2
+479	1
+480	3
+481	1
+482	1
+483	1
+484	1
+485	1
+487	1
+489	4
+490	1
+491	1
+492	2
+493	1
+494	1
+495	1
+496	1
+497	1
+498	3
+5	3
+51	2
+53	1
+54	1
+57	1
+58	2
+64	1
+65	1
+66	1
+67	2
+69	1
+70	3
+72	2
+74	1
+76	2
+77	1
+78	1
+8	1
+80	1
+82	1
+83	2
+84	2
+85	1
+86	1
+87	1
+9	1
+90	3
+92	1
+95	2
+96	1
+97	2
+98	2

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/multi_insert_gby2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/multi_insert_gby2.q.out?rev=1627210&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/multi_insert_gby2.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/multi_insert_gby2.q.out Wed Sep 24 03:16:25 2014
@@ -0,0 +1,180 @@
+PREHOOK: query: --HIVE-3699 Multiple insert overwrite into multiple tables query stores same results in all tables
+create table e1 (count int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@e1
+POSTHOOK: query: --HIVE-3699 Multiple insert overwrite into multiple tables query stores same results in all tables
+create table e1 (count int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@e1
+PREHOOK: query: create table e2 (percentile double)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@e2
+POSTHOOK: query: create table e2 (percentile double)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@e2
+PREHOOK: query: explain
+FROM (select key, cast(key as double) as value from src order by key) a
+INSERT OVERWRITE TABLE e1
+    SELECT COUNT(*)
+INSERT OVERWRITE TABLE e2
+    SELECT percentile_approx(value, 0.5)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+FROM (select key, cast(key as double) as value from src order by key) a
+INSERT OVERWRITE TABLE e1
+    SELECT COUNT(*)
+INSERT OVERWRITE TABLE e2
+    SELECT percentile_approx(value, 0.5)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-3 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-3
+  Stage-4 depends on stages: Stage-0
+  Stage-1 depends on stages: Stage-3
+  Stage-5 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP SORT)
+        Reducer 3 <- Reducer 2 (GROUP)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string), UDFToDouble(key) (type: double)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string)
+                      sort order: +
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col1 (type: double)
+        Reducer 2 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: double)
+                outputColumnNames: _col1
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: double)
+        Reducer 3 
+            Reduce Operator Tree:
+              Forward
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: count()
+                  mode: complete
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: UDFToInteger(_col0) (type: int)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.e1
+                Group By Operator
+                  aggregations: percentile_approx(VALUE._col0, 0.5)
+                  mode: complete
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: double)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.e2
+
+  Stage: Stage-3
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.e1
+
+  Stage: Stage-4
+    Stats-Aggr Operator
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.e2
+
+  Stage: Stage-5
+    Stats-Aggr Operator
+
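This plan differs from the previous file's: both aggregates are global (no GROUP BY), so after the ORDER BY in Reducer 2 everything funnels through an empty-key shuffle into a single Reducer 3, whose Forward feeds both Group By Operators in complete mode. percentile_approx(value, 0.5) computes an approximate median over doubles. The same two results could be obtained in one pass without multi-insert; a sketch (illustrative, not part of the test):

    SELECT COUNT(*), percentile_approx(value, 0.5)
    FROM (SELECT key, cast(key AS double) AS value FROM src ORDER BY key) a;
    -- expected: 500 and 255.5, matching the e1/e2 contents below
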
+PREHOOK: query: FROM (select key, cast(key as double) as value from src order by key) a
+INSERT OVERWRITE TABLE e1
+    SELECT COUNT(*)
+INSERT OVERWRITE TABLE e2
+    SELECT percentile_approx(value, 0.5)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@e1
+PREHOOK: Output: default@e2
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM (select key, cast(key as double) as value from src order by key) a
+INSERT OVERWRITE TABLE e1
+    SELECT COUNT(*)
+INSERT OVERWRITE TABLE e2
+    SELECT percentile_approx(value, 0.5)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@e1
+POSTHOOK: Output: default@e2
+POSTHOOK: Lineage: e1.count EXPRESSION [(src)src.null, ]
+POSTHOOK: Lineage: e2.percentile EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: select * from e1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@e1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from e1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@e1
+#### A masked pattern was here ####
+500
+PREHOOK: query: select * from e2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@e2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from e2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@e2
+#### A masked pattern was here ####
+255.5

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/multi_insert_gby3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/multi_insert_gby3.q.out?rev=1627210&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/multi_insert_gby3.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/multi_insert_gby3.q.out Wed Sep 24 03:16:25 2014
@@ -0,0 +1,1951 @@
+PREHOOK: query: create table e1 (key string, keyD double)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@e1
+POSTHOOK: query: create table e1 (key string, keyD double)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@e1
+PREHOOK: query: create table e2 (key string, keyD double, value string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@e2
+POSTHOOK: query: create table e2 (key string, keyD double, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@e2
+PREHOOK: query: create table e3 (key string, keyD double)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@e3
+POSTHOOK: query: create table e3 (key string, keyD double)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@e3
+PREHOOK: query: explain
+FROM (select key, cast(key as double) as keyD, value from src order by key) a
+INSERT OVERWRITE TABLE e1
+    SELECT key, COUNT(distinct value) group by key
+INSERT OVERWRITE TABLE e2
+    SELECT key, sum(keyD), value group by key, value
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+FROM (select key, cast(key as double) as keyD, value from src order by key) a
+INSERT OVERWRITE TABLE e1
+    SELECT key, COUNT(distinct value) group by key
+INSERT OVERWRITE TABLE e2
+    SELECT key, sum(keyD), value group by key, value
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-3 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-3
+  Stage-4 depends on stages: Stage-0
+  Stage-1 depends on stages: Stage-3
+  Stage-5 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP SORT)
+        Reducer 3 <- Reducer 2 (GROUP SORT)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string), UDFToDouble(key) (type: double), value (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string)
+                      sort order: +
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col1 (type: double), _col2 (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: double), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string), _col2 (type: string)
+                  sort order: ++
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: double)
+        Reducer 3 
+            Reduce Operator Tree:
+              Forward
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: count(DISTINCT KEY._col1:0._col0)
+                  keys: KEY._col0 (type: string)
+                  mode: complete
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: string), UDFToDouble(_col1) (type: double)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.e1
+                Group By Operator
+                  aggregations: sum(VALUE._col0)
+                  keys: KEY._col0 (type: string), KEY._col1:0._col0 (type: string)
+                  mode: complete
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: string), _col2 (type: double), _col1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.e2
+
+  Stage: Stage-3
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.e1
+
+  Stage: Stage-4
+    Stats-Aggr Operator
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.e2
+
+  Stage: Stage-5
+    Stats-Aggr Operator
+
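The COUNT(DISTINCT value) branch forces value into the reduce sort key: Reducer 2 re-shuffles on key expressions (_col0, _col2) with sort order ++ but partitions on _col0 alone, so each reducer sees all of a key's values in sorted order and can count distincts in a streaming fashion, while the sum(keyD) branch reuses the same shuffle with (key, value) as its grouping key. As standalone queries the two branches would read (same names as above; illustrative):

    SELECT key, COUNT(DISTINCT value) FROM src GROUP BY key;
    SELECT key, sum(cast(key AS double)) AS keyD, value FROM src GROUP BY key, value;

Since every key in src carries a single distinct value (val_<key>), the distinct count is uniformly 1, which appears as 1.0 in e1 below after the cast to its double column.
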
+PREHOOK: query: explain
+FROM (select key, cast(key as double) as keyD, value from src order by key) a
+INSERT OVERWRITE TABLE e2
+    SELECT key, sum(keyD), value group by key, value
+INSERT OVERWRITE TABLE e1
+    SELECT key, COUNT(distinct value) group by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+FROM (select key, cast(key as double) as keyD, value from src order by key) a
+INSERT OVERWRITE TABLE e2
+    SELECT key, sum(keyD), value group by key, value
+INSERT OVERWRITE TABLE e1
+    SELECT key, COUNT(distinct value) group by key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-3 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-3
+  Stage-4 depends on stages: Stage-0
+  Stage-1 depends on stages: Stage-3
+  Stage-5 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP SORT)
+        Reducer 3 <- Reducer 2 (GROUP SORT)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string), UDFToDouble(key) (type: double), value (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string)
+                      sort order: +
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col1 (type: double), _col2 (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: double), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string), _col2 (type: string)
+                  sort order: ++
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: double)
+        Reducer 3 
+            Reduce Operator Tree:
+              Forward
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: count(DISTINCT KEY._col1:0._col0)
+                  keys: KEY._col0 (type: string)
+                  mode: complete
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: string), UDFToDouble(_col1) (type: double)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.e1
+                Group By Operator
+                  aggregations: sum(VALUE._col0)
+                  keys: KEY._col0 (type: string), KEY._col1:0._col0 (type: string)
+                  mode: complete
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: string), _col2 (type: double), _col1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.e2
+
+  Stage: Stage-3
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.e1
+
+  Stage: Stage-4
+    Stats-Aggr Operator
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.e2
+
+  Stage: Stage-5
+    Stats-Aggr Operator
+
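Note that this second plan is identical to the first: reversing the order of the INSERT clauses does not change the generated Spark plan, and Stage-0 still moves into default.e1. The execution blocks that follow confirm both orderings load the same data.
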
+PREHOOK: query: FROM (select key, cast(key as double) as keyD, value from src order by key) a
+INSERT OVERWRITE TABLE e1
+    SELECT key, COUNT(distinct value) group by key
+INSERT OVERWRITE TABLE e2
+    SELECT key, sum(keyD), value group by key, value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@e1
+PREHOOK: Output: default@e2
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM (select key, cast(key as double) as keyD, value from src order by key) a
+INSERT OVERWRITE TABLE e1
+    SELECT key, COUNT(distinct value) group by key
+INSERT OVERWRITE TABLE e2
+    SELECT key, sum(keyD), value group by key, value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@e1
+POSTHOOK: Output: default@e2
+POSTHOOK: Lineage: e1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: e1.keyd EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: e2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: e2.keyd EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: e2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: select * from e1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@e1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from e1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@e1
+#### A masked pattern was here ####
+0	1.0
+10	1.0
+100	1.0
+103	1.0
+104	1.0
+105	1.0
+11	1.0
+111	1.0
+113	1.0
+114	1.0
+116	1.0
+118	1.0
+119	1.0
+12	1.0
+120	1.0
+125	1.0
+126	1.0
+128	1.0
+129	1.0
+131	1.0
+133	1.0
+134	1.0
+136	1.0
+137	1.0
+138	1.0
+143	1.0
+145	1.0
+146	1.0
+149	1.0
+15	1.0
+150	1.0
+152	1.0
+153	1.0
+155	1.0
+156	1.0
+157	1.0
+158	1.0
+160	1.0
+162	1.0
+163	1.0
+164	1.0
+165	1.0
+166	1.0
+167	1.0
+168	1.0
+169	1.0
+17	1.0
+170	1.0
+172	1.0
+174	1.0
+175	1.0
+176	1.0
+177	1.0
+178	1.0
+179	1.0
+18	1.0
+180	1.0
+181	1.0
+183	1.0
+186	1.0
+187	1.0
+189	1.0
+19	1.0
+190	1.0
+191	1.0
+192	1.0
+193	1.0
+194	1.0
+195	1.0
+196	1.0
+197	1.0
+199	1.0
+2	1.0
+20	1.0
+200	1.0
+201	1.0
+202	1.0
+203	1.0
+205	1.0
+207	1.0
+208	1.0
+209	1.0
+213	1.0
+214	1.0
+216	1.0
+217	1.0
+218	1.0
+219	1.0
+221	1.0
+222	1.0
+223	1.0
+224	1.0
+226	1.0
+228	1.0
+229	1.0
+230	1.0
+233	1.0
+235	1.0
+237	1.0
+238	1.0
+239	1.0
+24	1.0
+241	1.0
+242	1.0
+244	1.0
+247	1.0
+248	1.0
+249	1.0
+252	1.0
+255	1.0
+256	1.0
+257	1.0
+258	1.0
+26	1.0
+260	1.0
+262	1.0
+263	1.0
+265	1.0
+266	1.0
+27	1.0
+272	1.0
+273	1.0
+274	1.0
+275	1.0
+277	1.0
+278	1.0
+28	1.0
+280	1.0
+281	1.0
+282	1.0
+283	1.0
+284	1.0
+285	1.0
+286	1.0
+287	1.0
+288	1.0
+289	1.0
+291	1.0
+292	1.0
+296	1.0
+298	1.0
+30	1.0
+302	1.0
+305	1.0
+306	1.0
+307	1.0
+308	1.0
+309	1.0
+310	1.0
+311	1.0
+315	1.0
+316	1.0
+317	1.0
+318	1.0
+321	1.0
+322	1.0
+323	1.0
+325	1.0
+327	1.0
+33	1.0
+331	1.0
+332	1.0
+333	1.0
+335	1.0
+336	1.0
+338	1.0
+339	1.0
+34	1.0
+341	1.0
+342	1.0
+344	1.0
+345	1.0
+348	1.0
+35	1.0
+351	1.0
+353	1.0
+356	1.0
+360	1.0
+362	1.0
+364	1.0
+365	1.0
+366	1.0
+367	1.0
+368	1.0
+369	1.0
+37	1.0
+373	1.0
+374	1.0
+375	1.0
+377	1.0
+378	1.0
+379	1.0
+382	1.0
+384	1.0
+386	1.0
+389	1.0
+392	1.0
+393	1.0
+394	1.0
+395	1.0
+396	1.0
+397	1.0
+399	1.0
+4	1.0
+400	1.0
+401	1.0
+402	1.0
+403	1.0
+404	1.0
+406	1.0
+407	1.0
+409	1.0
+41	1.0
+411	1.0
+413	1.0
+414	1.0
+417	1.0
+418	1.0
+419	1.0
+42	1.0
+421	1.0
+424	1.0
+427	1.0
+429	1.0
+43	1.0
+430	1.0
+431	1.0
+432	1.0
+435	1.0
+436	1.0
+437	1.0
+438	1.0
+439	1.0
+44	1.0
+443	1.0
+444	1.0
+446	1.0
+448	1.0
+449	1.0
+452	1.0
+453	1.0
+454	1.0
+455	1.0
+457	1.0
+458	1.0
+459	1.0
+460	1.0
+462	1.0
+463	1.0
+466	1.0
+467	1.0
+468	1.0
+469	1.0
+47	1.0
+470	1.0
+472	1.0
+475	1.0
+477	1.0
+478	1.0
+479	1.0
+480	1.0
+481	1.0
+482	1.0
+483	1.0
+484	1.0
+485	1.0
+487	1.0
+489	1.0
+490	1.0
+491	1.0
+492	1.0
+493	1.0
+494	1.0
+495	1.0
+496	1.0
+497	1.0
+498	1.0
+5	1.0
+51	1.0
+53	1.0
+54	1.0
+57	1.0
+58	1.0
+64	1.0
+65	1.0
+66	1.0
+67	1.0
+69	1.0
+70	1.0
+72	1.0
+74	1.0
+76	1.0
+77	1.0
+78	1.0
+8	1.0
+80	1.0
+82	1.0
+83	1.0
+84	1.0
+85	1.0
+86	1.0
+87	1.0
+9	1.0
+90	1.0
+92	1.0
+95	1.0
+96	1.0
+97	1.0
+98	1.0
+PREHOOK: query: select * from e2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@e2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from e2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@e2
+#### A masked pattern was here ####
+0	0.0	val_0
+10	10.0	val_10
+100	200.0	val_100
+103	206.0	val_103
+104	208.0	val_104
+105	105.0	val_105
+11	11.0	val_11
+111	111.0	val_111
+113	226.0	val_113
+114	114.0	val_114
+116	116.0	val_116
+118	236.0	val_118
+119	357.0	val_119
+12	24.0	val_12
+120	240.0	val_120
+125	250.0	val_125
+126	126.0	val_126
+128	384.0	val_128
+129	258.0	val_129
+131	131.0	val_131
+133	133.0	val_133
+134	268.0	val_134
+136	136.0	val_136
+137	274.0	val_137
+138	552.0	val_138
+143	143.0	val_143
+145	145.0	val_145
+146	292.0	val_146
+149	298.0	val_149
+15	30.0	val_15
+150	150.0	val_150
+152	304.0	val_152
+153	153.0	val_153
+155	155.0	val_155
+156	156.0	val_156
+157	157.0	val_157
+158	158.0	val_158
+160	160.0	val_160
+162	162.0	val_162
+163	163.0	val_163
+164	328.0	val_164
+165	330.0	val_165
+166	166.0	val_166
+167	501.0	val_167
+168	168.0	val_168
+169	676.0	val_169
+17	17.0	val_17
+170	170.0	val_170
+172	344.0	val_172
+174	348.0	val_174
+175	350.0	val_175
+176	352.0	val_176
+177	177.0	val_177
+178	178.0	val_178
+179	358.0	val_179
+18	36.0	val_18
+180	180.0	val_180
+181	181.0	val_181
+183	183.0	val_183
+186	186.0	val_186
+187	561.0	val_187
+189	189.0	val_189
+19	19.0	val_19
+190	190.0	val_190
+191	382.0	val_191
+192	192.0	val_192
+193	579.0	val_193
+194	194.0	val_194
+195	390.0	val_195
+196	196.0	val_196
+197	394.0	val_197
+199	597.0	val_199
+2	2.0	val_2
+20	20.0	val_20
+200	400.0	val_200
+201	201.0	val_201
+202	202.0	val_202
+203	406.0	val_203
+205	410.0	val_205
+207	414.0	val_207
+208	624.0	val_208
+209	418.0	val_209
+213	426.0	val_213
+214	214.0	val_214
+216	432.0	val_216
+217	434.0	val_217
+218	218.0	val_218
+219	438.0	val_219
+221	442.0	val_221
+222	222.0	val_222
+223	446.0	val_223
+224	448.0	val_224
+226	226.0	val_226
+228	228.0	val_228
+229	458.0	val_229
+230	1150.0	val_230
+233	466.0	val_233
+235	235.0	val_235
+237	474.0	val_237
+238	476.0	val_238
+239	478.0	val_239
+24	48.0	val_24
+241	241.0	val_241
+242	484.0	val_242
+244	244.0	val_244
+247	247.0	val_247
+248	248.0	val_248
+249	249.0	val_249
+252	252.0	val_252
+255	510.0	val_255
+256	512.0	val_256
+257	257.0	val_257
+258	258.0	val_258
+26	52.0	val_26
+260	260.0	val_260
+262	262.0	val_262
+263	263.0	val_263
+265	530.0	val_265
+266	266.0	val_266
+27	27.0	val_27
+272	544.0	val_272
+273	819.0	val_273
+274	274.0	val_274
+275	275.0	val_275
+277	1108.0	val_277
+278	556.0	val_278
+28	28.0	val_28
+280	560.0	val_280
+281	562.0	val_281
+282	564.0	val_282
+283	283.0	val_283
+284	284.0	val_284
+285	285.0	val_285
+286	286.0	val_286
+287	287.0	val_287
+288	576.0	val_288
+289	289.0	val_289
+291	291.0	val_291
+292	292.0	val_292
+296	296.0	val_296
+298	894.0	val_298
+30	30.0	val_30
+302	302.0	val_302
+305	305.0	val_305
+306	306.0	val_306
+307	614.0	val_307
+308	308.0	val_308
+309	618.0	val_309
+310	310.0	val_310
+311	933.0	val_311
+315	315.0	val_315
+316	948.0	val_316
+317	634.0	val_317
+318	954.0	val_318
+321	642.0	val_321
+322	644.0	val_322
+323	323.0	val_323
+325	650.0	val_325
+327	981.0	val_327
+33	33.0	val_33
+331	662.0	val_331
+332	332.0	val_332
+333	666.0	val_333
+335	335.0	val_335
+336	336.0	val_336
+338	338.0	val_338
+339	339.0	val_339
+34	34.0	val_34
+341	341.0	val_341
+342	684.0	val_342
+344	688.0	val_344
+345	345.0	val_345
+348	1740.0	val_348
+35	105.0	val_35
+351	351.0	val_351
+353	706.0	val_353
+356	356.0	val_356
+360	360.0	val_360
+362	362.0	val_362
+364	364.0	val_364
+365	365.0	val_365
+366	366.0	val_366
+367	734.0	val_367
+368	368.0	val_368
+369	1107.0	val_369
+37	74.0	val_37
+373	373.0	val_373
+374	374.0	val_374
+375	375.0	val_375
+377	377.0	val_377
+378	378.0	val_378
+379	379.0	val_379
+382	764.0	val_382
+384	1152.0	val_384
+386	386.0	val_386
+389	389.0	val_389
+392	392.0	val_392
+393	393.0	val_393
+394	394.0	val_394
+395	790.0	val_395
+396	1188.0	val_396
+397	794.0	val_397
+399	798.0	val_399
+4	4.0	val_4
+400	400.0	val_400
+401	2005.0	val_401
+402	402.0	val_402
+403	1209.0	val_403
+404	808.0	val_404
+406	1624.0	val_406
+407	407.0	val_407
+409	1227.0	val_409
+41	41.0	val_41
+411	411.0	val_411
+413	826.0	val_413
+414	828.0	val_414
+417	1251.0	val_417
+418	418.0	val_418
+419	419.0	val_419
+42	84.0	val_42
+421	421.0	val_421
+424	848.0	val_424
+427	427.0	val_427
+429	858.0	val_429
+43	43.0	val_43
+430	1290.0	val_430
+431	1293.0	val_431
+432	432.0	val_432
+435	435.0	val_435
+436	436.0	val_436
+437	437.0	val_437
+438	1314.0	val_438
+439	878.0	val_439
+44	44.0	val_44
+443	443.0	val_443
+444	444.0	val_444
+446	446.0	val_446
+448	448.0	val_448
+449	449.0	val_449
+452	452.0	val_452
+453	453.0	val_453
+454	1362.0	val_454
+455	455.0	val_455
+457	457.0	val_457
+458	916.0	val_458
+459	918.0	val_459
+460	460.0	val_460
+462	924.0	val_462
+463	926.0	val_463
+466	1398.0	val_466
+467	467.0	val_467
+468	1872.0	val_468
+469	2345.0	val_469
+47	47.0	val_47
+470	470.0	val_470
+472	472.0	val_472
+475	475.0	val_475
+477	477.0	val_477
+478	956.0	val_478
+479	479.0	val_479
+480	1440.0	val_480
+481	481.0	val_481
+482	482.0	val_482
+483	483.0	val_483
+484	484.0	val_484
+485	485.0	val_485
+487	487.0	val_487
+489	1956.0	val_489
+490	490.0	val_490
+491	491.0	val_491
+492	984.0	val_492
+493	493.0	val_493
+494	494.0	val_494
+495	495.0	val_495
+496	496.0	val_496
+497	497.0	val_497
+498	1494.0	val_498
+5	15.0	val_5
+51	102.0	val_51
+53	53.0	val_53
+54	54.0	val_54
+57	57.0	val_57
+58	116.0	val_58
+64	64.0	val_64
+65	65.0	val_65
+66	66.0	val_66
+67	134.0	val_67
+69	69.0	val_69
+70	210.0	val_70
+72	144.0	val_72
+74	74.0	val_74
+76	152.0	val_76
+77	77.0	val_77
+78	78.0	val_78
+8	8.0	val_8
+80	80.0	val_80
+82	82.0	val_82
+83	166.0	val_83
+84	168.0	val_84
+85	85.0	val_85
+86	86.0	val_86
+87	87.0	val_87
+9	9.0	val_9
+90	270.0	val_90
+92	92.0	val_92
+95	190.0	val_95
+96	96.0	val_96
+97	194.0	val_97
+98	196.0	val_98
+PREHOOK: query: FROM (select key, cast(key as double) as keyD, value from src order by key) a
+INSERT OVERWRITE TABLE e2
+    SELECT key, sum(keyD), value group by key, value
+INSERT OVERWRITE TABLE e1
+    SELECT key, COUNT(distinct value) group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@e1
+PREHOOK: Output: default@e2
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM (select key, cast(key as double) as keyD, value from src order by key) a
+INSERT OVERWRITE TABLE e2
+    SELECT key, sum(keyD), value group by key, value
+INSERT OVERWRITE TABLE e1
+    SELECT key, COUNT(distinct value) group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@e1
+POSTHOOK: Output: default@e2
+POSTHOOK: Lineage: e1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: e1.keyd EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: e2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: e2.keyd EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: e2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: select * from e1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@e1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from e1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@e1
+#### A masked pattern was here ####
+0	1.0
+10	1.0
+100	1.0
+103	1.0
+104	1.0
+105	1.0
+11	1.0
+111	1.0
+113	1.0
+114	1.0
+116	1.0
+118	1.0
+119	1.0
+12	1.0
+120	1.0
+125	1.0
+126	1.0
+128	1.0
+129	1.0
+131	1.0
+133	1.0
+134	1.0
+136	1.0
+137	1.0
+138	1.0
+143	1.0
+145	1.0
+146	1.0
+149	1.0
+15	1.0
+150	1.0
+152	1.0
+153	1.0
+155	1.0
+156	1.0
+157	1.0
+158	1.0
+160	1.0
+162	1.0
+163	1.0
+164	1.0
+165	1.0
+166	1.0
+167	1.0
+168	1.0
+169	1.0
+17	1.0
+170	1.0
+172	1.0
+174	1.0
+175	1.0
+176	1.0
+177	1.0
+178	1.0
+179	1.0
+18	1.0
+180	1.0
+181	1.0
+183	1.0
+186	1.0
+187	1.0
+189	1.0
+19	1.0
+190	1.0
+191	1.0
+192	1.0
+193	1.0
+194	1.0
+195	1.0
+196	1.0
+197	1.0
+199	1.0
+2	1.0
+20	1.0
+200	1.0
+201	1.0
+202	1.0
+203	1.0
+205	1.0
+207	1.0
+208	1.0
+209	1.0
+213	1.0
+214	1.0
+216	1.0
+217	1.0
+218	1.0
+219	1.0
+221	1.0
+222	1.0
+223	1.0
+224	1.0
+226	1.0
+228	1.0
+229	1.0
+230	1.0
+233	1.0
+235	1.0
+237	1.0
+238	1.0
+239	1.0
+24	1.0
+241	1.0
+242	1.0
+244	1.0
+247	1.0
+248	1.0
+249	1.0
+252	1.0
+255	1.0
+256	1.0
+257	1.0
+258	1.0
+26	1.0
+260	1.0
+262	1.0
+263	1.0
+265	1.0
+266	1.0
+27	1.0
+272	1.0
+273	1.0
+274	1.0
+275	1.0
+277	1.0
+278	1.0
+28	1.0
+280	1.0
+281	1.0
+282	1.0
+283	1.0
+284	1.0
+285	1.0
+286	1.0
+287	1.0
+288	1.0
+289	1.0
+291	1.0
+292	1.0
+296	1.0
+298	1.0
+30	1.0
+302	1.0
+305	1.0
+306	1.0
+307	1.0
+308	1.0
+309	1.0
+310	1.0
+311	1.0
+315	1.0
+316	1.0
+317	1.0
+318	1.0
+321	1.0
+322	1.0
+323	1.0
+325	1.0
+327	1.0
+33	1.0
+331	1.0
+332	1.0
+333	1.0
+335	1.0
+336	1.0
+338	1.0
+339	1.0
+34	1.0
+341	1.0
+342	1.0
+344	1.0
+345	1.0
+348	1.0
+35	1.0
+351	1.0
+353	1.0
+356	1.0
+360	1.0
+362	1.0
+364	1.0
+365	1.0
+366	1.0
+367	1.0
+368	1.0
+369	1.0
+37	1.0
+373	1.0
+374	1.0
+375	1.0
+377	1.0
+378	1.0
+379	1.0
+382	1.0
+384	1.0
+386	1.0
+389	1.0
+392	1.0
+393	1.0
+394	1.0
+395	1.0
+396	1.0
+397	1.0
+399	1.0
+4	1.0
+400	1.0
+401	1.0
+402	1.0
+403	1.0
+404	1.0
+406	1.0
+407	1.0
+409	1.0
+41	1.0
+411	1.0
+413	1.0
+414	1.0
+417	1.0
+418	1.0
+419	1.0
+42	1.0
+421	1.0
+424	1.0
+427	1.0
+429	1.0
+43	1.0
+430	1.0
+431	1.0
+432	1.0
+435	1.0
+436	1.0
+437	1.0
+438	1.0
+439	1.0
+44	1.0
+443	1.0
+444	1.0
+446	1.0
+448	1.0
+449	1.0
+452	1.0
+453	1.0
+454	1.0
+455	1.0
+457	1.0
+458	1.0
+459	1.0
+460	1.0
+462	1.0
+463	1.0
+466	1.0
+467	1.0
+468	1.0
+469	1.0
+47	1.0
+470	1.0
+472	1.0
+475	1.0
+477	1.0
+478	1.0
+479	1.0
+480	1.0
+481	1.0
+482	1.0
+483	1.0
+484	1.0
+485	1.0
+487	1.0
+489	1.0
+490	1.0
+491	1.0
+492	1.0
+493	1.0
+494	1.0
+495	1.0
+496	1.0
+497	1.0
+498	1.0
+5	1.0
+51	1.0
+53	1.0
+54	1.0
+57	1.0
+58	1.0
+64	1.0
+65	1.0
+66	1.0
+67	1.0
+69	1.0
+70	1.0
+72	1.0
+74	1.0
+76	1.0
+77	1.0
+78	1.0
+8	1.0
+80	1.0
+82	1.0
+83	1.0
+84	1.0
+85	1.0
+86	1.0
+87	1.0
+9	1.0
+90	1.0
+92	1.0
+95	1.0
+96	1.0
+97	1.0
+98	1.0
+PREHOOK: query: select * from e2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@e2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from e2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@e2
+#### A masked pattern was here ####
+0	0.0	val_0
+10	10.0	val_10
+100	200.0	val_100
+103	206.0	val_103
+104	208.0	val_104
+105	105.0	val_105
+11	11.0	val_11
+111	111.0	val_111
+113	226.0	val_113
+114	114.0	val_114
+116	116.0	val_116
+118	236.0	val_118
+119	357.0	val_119
+12	24.0	val_12
+120	240.0	val_120
+125	250.0	val_125
+126	126.0	val_126
+128	384.0	val_128
+129	258.0	val_129
+131	131.0	val_131
+133	133.0	val_133
+134	268.0	val_134
+136	136.0	val_136
+137	274.0	val_137
+138	552.0	val_138
+143	143.0	val_143
+145	145.0	val_145
+146	292.0	val_146
+149	298.0	val_149
+15	30.0	val_15
+150	150.0	val_150
+152	304.0	val_152
+153	153.0	val_153
+155	155.0	val_155
+156	156.0	val_156
+157	157.0	val_157
+158	158.0	val_158
+160	160.0	val_160
+162	162.0	val_162
+163	163.0	val_163
+164	328.0	val_164
+165	330.0	val_165
+166	166.0	val_166
+167	501.0	val_167
+168	168.0	val_168
+169	676.0	val_169
+17	17.0	val_17
+170	170.0	val_170
+172	344.0	val_172
+174	348.0	val_174
+175	350.0	val_175
+176	352.0	val_176
+177	177.0	val_177
+178	178.0	val_178
+179	358.0	val_179
+18	36.0	val_18
+180	180.0	val_180
+181	181.0	val_181
+183	183.0	val_183
+186	186.0	val_186
+187	561.0	val_187
+189	189.0	val_189
+19	19.0	val_19
+190	190.0	val_190
+191	382.0	val_191
+192	192.0	val_192
+193	579.0	val_193
+194	194.0	val_194
+195	390.0	val_195
+196	196.0	val_196
+197	394.0	val_197
+199	597.0	val_199
+2	2.0	val_2
+20	20.0	val_20
+200	400.0	val_200
+201	201.0	val_201
+202	202.0	val_202
+203	406.0	val_203
+205	410.0	val_205
+207	414.0	val_207
+208	624.0	val_208
+209	418.0	val_209
+213	426.0	val_213
+214	214.0	val_214
+216	432.0	val_216
+217	434.0	val_217
+218	218.0	val_218
+219	438.0	val_219
+221	442.0	val_221
+222	222.0	val_222
+223	446.0	val_223
+224	448.0	val_224
+226	226.0	val_226
+228	228.0	val_228
+229	458.0	val_229
+230	1150.0	val_230
+233	466.0	val_233
+235	235.0	val_235
+237	474.0	val_237
+238	476.0	val_238
+239	478.0	val_239
+24	48.0	val_24
+241	241.0	val_241
+242	484.0	val_242
+244	244.0	val_244
+247	247.0	val_247
+248	248.0	val_248
+249	249.0	val_249
+252	252.0	val_252
+255	510.0	val_255
+256	512.0	val_256
+257	257.0	val_257
+258	258.0	val_258
+26	52.0	val_26
+260	260.0	val_260
+262	262.0	val_262
+263	263.0	val_263
+265	530.0	val_265
+266	266.0	val_266
+27	27.0	val_27
+272	544.0	val_272
+273	819.0	val_273
+274	274.0	val_274
+275	275.0	val_275
+277	1108.0	val_277
+278	556.0	val_278
+28	28.0	val_28
+280	560.0	val_280
+281	562.0	val_281
+282	564.0	val_282
+283	283.0	val_283
+284	284.0	val_284
+285	285.0	val_285
+286	286.0	val_286
+287	287.0	val_287
+288	576.0	val_288
+289	289.0	val_289
+291	291.0	val_291
+292	292.0	val_292
+296	296.0	val_296
+298	894.0	val_298
+30	30.0	val_30
+302	302.0	val_302
+305	305.0	val_305
+306	306.0	val_306
+307	614.0	val_307
+308	308.0	val_308
+309	618.0	val_309
+310	310.0	val_310
+311	933.0	val_311
+315	315.0	val_315
+316	948.0	val_316
+317	634.0	val_317
+318	954.0	val_318
+321	642.0	val_321
+322	644.0	val_322
+323	323.0	val_323
+325	650.0	val_325
+327	981.0	val_327
+33	33.0	val_33
+331	662.0	val_331
+332	332.0	val_332
+333	666.0	val_333
+335	335.0	val_335
+336	336.0	val_336
+338	338.0	val_338
+339	339.0	val_339
+34	34.0	val_34
+341	341.0	val_341
+342	684.0	val_342
+344	688.0	val_344
+345	345.0	val_345
+348	1740.0	val_348
+35	105.0	val_35
+351	351.0	val_351
+353	706.0	val_353
+356	356.0	val_356
+360	360.0	val_360
+362	362.0	val_362
+364	364.0	val_364
+365	365.0	val_365
+366	366.0	val_366
+367	734.0	val_367
+368	368.0	val_368
+369	1107.0	val_369
+37	74.0	val_37
+373	373.0	val_373
+374	374.0	val_374
+375	375.0	val_375
+377	377.0	val_377
+378	378.0	val_378
+379	379.0	val_379
+382	764.0	val_382
+384	1152.0	val_384
+386	386.0	val_386
+389	389.0	val_389
+392	392.0	val_392
+393	393.0	val_393
+394	394.0	val_394
+395	790.0	val_395
+396	1188.0	val_396
+397	794.0	val_397
+399	798.0	val_399
+4	4.0	val_4
+400	400.0	val_400
+401	2005.0	val_401
+402	402.0	val_402
+403	1209.0	val_403
+404	808.0	val_404
+406	1624.0	val_406
+407	407.0	val_407
+409	1227.0	val_409
+41	41.0	val_41
+411	411.0	val_411
+413	826.0	val_413
+414	828.0	val_414
+417	1251.0	val_417
+418	418.0	val_418
+419	419.0	val_419
+42	84.0	val_42
+421	421.0	val_421
+424	848.0	val_424
+427	427.0	val_427
+429	858.0	val_429
+43	43.0	val_43
+430	1290.0	val_430
+431	1293.0	val_431
+432	432.0	val_432
+435	435.0	val_435
+436	436.0	val_436
+437	437.0	val_437
+438	1314.0	val_438
+439	878.0	val_439
+44	44.0	val_44
+443	443.0	val_443
+444	444.0	val_444
+446	446.0	val_446
+448	448.0	val_448
+449	449.0	val_449
+452	452.0	val_452
+453	453.0	val_453
+454	1362.0	val_454
+455	455.0	val_455
+457	457.0	val_457
+458	916.0	val_458
+459	918.0	val_459
+460	460.0	val_460
+462	924.0	val_462
+463	926.0	val_463
+466	1398.0	val_466
+467	467.0	val_467
+468	1872.0	val_468
+469	2345.0	val_469
+47	47.0	val_47
+470	470.0	val_470
+472	472.0	val_472
+475	475.0	val_475
+477	477.0	val_477
+478	956.0	val_478
+479	479.0	val_479
+480	1440.0	val_480
+481	481.0	val_481
+482	482.0	val_482
+483	483.0	val_483
+484	484.0	val_484
+485	485.0	val_485
+487	487.0	val_487
+489	1956.0	val_489
+490	490.0	val_490
+491	491.0	val_491
+492	984.0	val_492
+493	493.0	val_493
+494	494.0	val_494
+495	495.0	val_495
+496	496.0	val_496
+497	497.0	val_497
+498	1494.0	val_498
+5	15.0	val_5
+51	102.0	val_51
+53	53.0	val_53
+54	54.0	val_54
+57	57.0	val_57
+58	116.0	val_58
+64	64.0	val_64
+65	65.0	val_65
+66	66.0	val_66
+67	134.0	val_67
+69	69.0	val_69
+70	210.0	val_70
+72	144.0	val_72
+74	74.0	val_74
+76	152.0	val_76
+77	77.0	val_77
+78	78.0	val_78
+8	8.0	val_8
+80	80.0	val_80
+82	82.0	val_82
+83	166.0	val_83
+84	168.0	val_84
+85	85.0	val_85
+86	86.0	val_86
+87	87.0	val_87
+9	9.0	val_9
+90	270.0	val_90
+92	92.0	val_92
+95	190.0	val_95
+96	96.0	val_96
+97	194.0	val_97
+98	196.0	val_98
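+-- The e2 rows above are consistent with the sum(keyD) aggregation: each key's
+-- double value is summed over its duplicate rows in src, so key 230 (which
+-- occurs five times) yields 5 * 230.0 = 1150.0. A minimal verification sketch,
+-- assuming the standard 500-row src table:
+SELECT key, CAST(key AS DOUBLE) * count(*) AS expected_sum
+FROM src
+WHERE key = '230'
+GROUP BY key;
+-- expected output: 230	1150.0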
+PREHOOK: query: explain
+from src
+insert overwrite table e1
+select key, count(distinct value) group by key
+insert overwrite table e3
+select value, count(distinct key) group by value
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+from src
+insert overwrite table e1
+select key, count(distinct value) group by key
+insert overwrite table e3
+select value, count(distinct key) group by value
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-4 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-4, Stage-5
+  Stage-0 depends on stages: Stage-3
+  Stage-6 depends on stages: Stage-0
+  Stage-1 depends on stages: Stage-3
+  Stage-7 depends on stages: Stage-1
+  Stage-5 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-4
+    Spark
+      Edges:
+        Reducer 4 <- Map 1 (GROUP SORT)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: key, value
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: count(DISTINCT value)
+                      keys: key (type: string), value (type: string)
+                      mode: hash
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string), _col1 (type: string)
+                        sort order: ++
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+        Reducer 4 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(DISTINCT KEY._col1:0._col0)
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: string), UDFToDouble(_col1) (type: double)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.e1
+
+  Stage: Stage-3
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.e1
+
+  Stage: Stage-6
+    Stats-Aggr Operator
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.e3
+
+  Stage: Stage-7
+    Stats-Aggr Operator
+
+  Stage: Stage-5
+    Spark
+      Edges:
+        Reducer 5 <- Map 2 (GROUP SORT)
+#### A masked pattern was here ####
+      Vertices:
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  Select Operator
+                    expressions: value (type: string), key (type: string)
+                    outputColumnNames: value, key
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: count(DISTINCT key)
+                      keys: value (type: string), key (type: string)
+                      mode: hash
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string), _col1 (type: string)
+                        sort order: ++
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+        Reducer 5 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(DISTINCT KEY._col1:0._col0)
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: string), UDFToDouble(_col1) (type: double)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.e3
+
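+-- Because the two inserts group on different columns (key vs. value), the plan
+-- above materializes the scan once (Stage-2, Map 3) and shuffles that output
+-- twice: Stage-4 feeds the key-grouped count into e1 and Stage-5 feeds the
+-- value-grouped count into e3. A sketch of the two standalone statements this
+-- single-scan multi-insert replaces (same results, but src is scanned twice):
+INSERT OVERWRITE TABLE e1
+SELECT key, count(DISTINCT value) FROM src GROUP BY key;
+INSERT OVERWRITE TABLE e3
+SELECT value, count(DISTINCT key) FROM src GROUP BY value;
+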
+PREHOOK: query: explain
+FROM (select key, cast(key as double) as keyD, value from src order by key) a
+INSERT OVERWRITE TABLE e1
+    SELECT key, COUNT(distinct value) group by key
+INSERT OVERWRITE TABLE e2
+    SELECT key, sum(keyD), value group by key, value
+INSERT overwrite table e3
+    SELECT key, COUNT(distinct keyD) group by key, keyD, value
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+FROM (select key, cast(key as double) as keyD, value from src order by key) a
+INSERT OVERWRITE TABLE e1
+    SELECT key, COUNT(distinct value) group by key
+INSERT OVERWRITE TABLE e2
+    SELECT key, sum(keyD), value group by key, value
+INSERT overwrite table e3
+    SELECT key, COUNT(distinct keyD) group by key, keyD, value
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-3 is a root stage
+  Stage-5 depends on stages: Stage-3
+  Stage-4 depends on stages: Stage-5, Stage-6
+  Stage-0 depends on stages: Stage-4
+  Stage-7 depends on stages: Stage-0
+  Stage-1 depends on stages: Stage-4
+  Stage-8 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-4
+  Stage-9 depends on stages: Stage-2
+  Stage-6 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-3
+    Spark
+      Edges:
+        Reducer 4 <- Map 3 (GROUP SORT)
+#### A masked pattern was here ####
+      Vertices:
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string), UDFToDouble(key) (type: double), value (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string), _col1 (type: double), _col2 (type: string)
+                      sort order: +++
+                      Map-reduce partition columns: _col0 (type: string), _col1 (type: double), _col2 (type: string)
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col1 (type: double), _col2 (type: string)
+        Reducer 4 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: double), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-5
+    Spark
+      Edges:
+        Reducer 5 <- Map 1 (GROUP SORT)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  Select Operator
+                    expressions: _col0 (type: string), _col1 (type: double), _col2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string), _col2 (type: string)
+                      sort order: ++
+                      Map-reduce partition columns: _col0 (type: string)
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col1 (type: double)
+        Reducer 5 
+            Reduce Operator Tree:
+              Forward
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: count(DISTINCT KEY._col1:0._col0)
+                  keys: KEY._col0 (type: string)
+                  mode: complete
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: string), UDFToDouble(_col1) (type: double)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.e1
+                Group By Operator
+                  aggregations: sum(VALUE._col0)
+                  keys: KEY._col0 (type: string), KEY._col1:0._col0 (type: string)
+                  mode: complete
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: string), _col2 (type: double), _col1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.e2
+
+  Stage: Stage-4
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.e1
+
+  Stage: Stage-7
+    Stats-Aggr Operator
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.e2
+
+  Stage: Stage-8
+    Stats-Aggr Operator
+
+  Stage: Stage-2
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.e3
+
+  Stage: Stage-9
+    Stats-Aggr Operator
+
+  Stage: Stage-6
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  Select Operator
+                    expressions: _col0 (type: string), _col1 (type: double), _col2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: count(DISTINCT _col1)
+                      keys: _col0 (type: string), _col1 (type: double), _col2 (type: string)
+                      mode: complete
+                      outputColumnNames: _col0, _col1, _col2, _col3
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                      Select Operator
+                        expressions: _col0 (type: string), UDFToDouble(_col3) (type: double)
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              name: default.e3
+
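+-- In the plan above, the subquery's ORDER BY (Stage-3) produces one sorted
+-- stream. Since the first two inserts both group on the leading key column,
+-- Stage-5 sends a single shuffle into Reducer 5, where a Forward operator fans
+-- the rows into the two complete-mode Group By operators feeding e1 and e2.
+-- The third insert groups on (key, keyD, value), every column of the sorted
+-- input, so Stage-6 computes it map-side with no shuffle. Each key maps to
+-- exactly one keyD, so every e3 count should be 1; a minimal spot check,
+-- assuming e3 shares e1's (key string, count int) schema:
+SELECT count(*) FROM default.e3 WHERE count <> 1;
+-- expected output: 0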