Posted to commits@hive.apache.org by br...@apache.org on 2014/08/22 22:34:38 UTC

svn commit: r1619924 [1/2] - in /hive/branches/spark: itests/src/test/resources/ ql/src/test/results/clientpositive/spark/

Author: brock
Date: Fri Aug 22 20:34:37 2014
New Revision: 1619924

URL: http://svn.apache.org/r1619924
Log:
HIVE-7702 - Start running .q file tests on spark [Spark Branch] (Chinna Rao Lalam via Brock)

Added:
    hive/branches/spark/ql/src/test/results/clientpositive/spark/enforce_order.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/having.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/insert1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into2.q.out
Modified:
    hive/branches/spark/itests/src/test/resources/testconfiguration.properties

Modified: hive/branches/spark/itests/src/test/resources/testconfiguration.properties
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/src/test/resources/testconfiguration.properties?rev=1619924&r1=1619923&r2=1619924&view=diff
==============================================================================
--- hive/branches/spark/itests/src/test/resources/testconfiguration.properties (original)
+++ hive/branches/spark/itests/src/test/resources/testconfiguration.properties Fri Aug 22 20:34:37 2014
@@ -385,4 +385,12 @@ spark.query.files=spark_test.q \
    union28.q \
    union29.q \
    union30.q \
-   union33.q
+   union33.q \
+   enforce_order.q \
+   groupby1.q \
+   groupby2.q \
+   groupby3.q \
+   having.q \
+   insert1.q \
+   insert_into1.q \
+   insert_into2.q
\ No newline at end of file
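
The spark.query.files list above is what the itests harness reads to decide which .q scripts run under the Spark CLI driver; every file added to the list is paired with one of the new expected-output .q.out files below. As a sketch of how one of these tests is exercised (the module path and driver name are assumptions based on the Spark branch layout, not shown in this commit):

    # Assumed invocation; adjust module/driver names to the actual branch layout
    cd itests/qtest-spark
    mvn test -Dtest=TestSparkCliDriver -Dqfile=groupby1.q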

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/enforce_order.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/enforce_order.q.out?rev=1619924&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/enforce_order.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/enforce_order.q.out Fri Aug 22 20:34:37 2014
@@ -0,0 +1,82 @@
+PREHOOK: query: drop table table_asc
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table table_asc
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table table_desc
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table table_desc
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table table_asc(key string, value string) clustered by (key) sorted by (key ASC) into 1 BUCKETS
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_asc
+POSTHOOK: query: create table table_asc(key string, value string) clustered by (key) sorted by (key ASC) into 1 BUCKETS
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_asc
+PREHOOK: query: create table table_desc(key string, value string) clustered by (key) sorted by (key DESC) into 1 BUCKETS
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_desc
+POSTHOOK: query: create table table_desc(key string, value string) clustered by (key) sorted by (key DESC) into 1 BUCKETS
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_desc
+PREHOOK: query: insert overwrite table table_asc select key, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@table_asc
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: insert overwrite table table_asc select key, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@table_asc
+POSTHOOK: Lineage: table_asc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: table_asc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: insert overwrite table table_desc select key, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@table_desc
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: insert overwrite table table_desc select key, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@table_desc
+POSTHOOK: Lineage: table_desc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: table_desc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: select * from table_asc limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_asc
+#### A masked pattern was here ####
+POSTHOOK: query: select * from table_asc limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_asc
+#### A masked pattern was here ####
+0	val_0
+0	val_0
+0	val_0
+10	val_10
+100	val_100
+100	val_100
+103	val_103
+103	val_103
+104	val_104
+104	val_104
+PREHOOK: query: select * from table_desc limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_desc
+#### A masked pattern was here ####
+POSTHOOK: query: select * from table_desc limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_desc
+#### A masked pattern was here ####
+98	val_98
+98	val_98
+97	val_97
+97	val_97
+96	val_96
+95	val_95
+95	val_95
+92	val_92
+90	val_90
+90	val_90
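
The PREHOOK/POSTHOOK lines above echo every statement enforce_order.q executes, so the test source can be read back out of this log. Reassembled as HiveQL (a sketch: the real .q file presumably also contains SET statements such as hive.enforce.sorting, which the hooks do not echo):

    -- enforce_order.q as implied by the hooks above
    drop table table_asc;
    drop table table_desc;

    create table table_asc (key string, value string)
      clustered by (key) sorted by (key ASC)  into 1 BUCKETS;
    create table table_desc(key string, value string)
      clustered by (key) sorted by (key DESC) into 1 BUCKETS;

    insert overwrite table table_asc  select key, value from src;
    insert overwrite table table_desc select key, value from src;

    select * from table_asc  limit 10;
    select * from table_desc limit 10;

The two SELECTs verify that the declared sort order was enforced at write time: table_asc comes back in ascending key order, table_desc in descending order.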

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby1.q.out?rev=1619924&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby1.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby1.q.out Fri Aug 22 20:34:37 2014
@@ -0,0 +1,422 @@
+PREHOOK: query: CREATE TABLE dest_g1(key INT, value DOUBLE) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest_g1
+POSTHOOK: query: CREATE TABLE dest_g1(key INT, value DOUBLE) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest_g1
+PREHOOK: query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest_g1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest_g1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP SORT)
+        Reducer 3 <- Reducer 2 (GROUP)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: key, value
+                    Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: key (type: string)
+                      sort order: +
+                      Map-reduce partition columns: rand() (type: double)
+                      Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: substr(value, 5) (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0)
+                keys: KEY._col0 (type: string)
+                mode: partial1
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: double)
+        Reducer 3 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0)
+                keys: KEY._col0 (type: string)
+                mode: final
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: double)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.dest_g1
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest_g1
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+PREHOOK: query: FROM src INSERT OVERWRITE TABLE dest_g1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest_g1
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM src INSERT OVERWRITE TABLE dest_g1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest_g1
+POSTHOOK: Lineage: dest_g1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_g1.value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT dest_g1.* FROM dest_g1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest_g1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dest_g1.* FROM dest_g1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest_g1
+#### A masked pattern was here ####
+168	168.0
+170	170.0
+436	436.0
+364	364.0
+209	418.0
+11	11.0
+374	374.0
+403	1209.0
+195	390.0
+252	252.0
+146	292.0
+95	190.0
+118	236.0
+189	189.0
+199	597.0
+196	196.0
+100	200.0
+382	764.0
+30	30.0
+455	455.0
+498	1494.0
+111	111.0
+287	287.0
+248	248.0
+19	19.0
+311	933.0
+125	250.0
+178	178.0
+160	160.0
+221	442.0
+87	87.0
+406	1624.0
+76	152.0
+335	335.0
+459	918.0
+263	263.0
+166	166.0
+463	926.0
+28	28.0
+223	446.0
+174	348.0
+203	406.0
+208	624.0
+4	4.0
+404	808.0
+377	377.0
+54	54.0
+368	368.0
+37	74.0
+280	560.0
+57	57.0
+47	47.0
+308	308.0
+291	291.0
+278	556.0
+98	196.0
+484	484.0
+409	1227.0
+155	155.0
+260	260.0
+317	634.0
+429	858.0
+309	618.0
+284	284.0
+413	826.0
+417	1251.0
+27	27.0
+296	296.0
+67	134.0
+244	244.0
+230	1150.0
+96	96.0
+183	183.0
+475	475.0
+249	249.0
+289	289.0
+427	427.0
+418	418.0
+181	181.0
+472	472.0
+454	1362.0
+207	414.0
+310	310.0
+233	466.0
+194	194.0
+224	448.0
+129	258.0
+10	10.0
+360	360.0
+460	460.0
+12	24.0
+5	15.0
+481	481.0
+85	85.0
+58	116.0
+369	1107.0
+482	482.0
+214	214.0
+177	177.0
+193	579.0
+9	9.0
+34	34.0
+378	378.0
+419	419.0
+165	330.0
+201	201.0
+241	241.0
+281	562.0
+397	794.0
+277	1108.0
+272	544.0
+285	285.0
+292	292.0
+298	894.0
+332	332.0
+43	43.0
+400	400.0
+321	642.0
+131	131.0
+333	666.0
+53	53.0
+242	484.0
+286	286.0
+396	1188.0
+389	389.0
+477	477.0
+421	421.0
+487	487.0
+375	375.0
+327	981.0
+258	258.0
+307	614.0
+392	392.0
+82	82.0
+41	41.0
+83	166.0
+490	490.0
+134	268.0
+78	78.0
+64	64.0
+256	512.0
+401	2005.0
+402	402.0
+393	393.0
+362	362.0
+435	435.0
+288	576.0
+113	226.0
+104	208.0
+20	20.0
+176	352.0
+448	448.0
+237	474.0
+394	394.0
+162	162.0
+480	1440.0
+384	1152.0
+323	323.0
+497	497.0
+446	446.0
+457	457.0
+438	1314.0
+222	222.0
+26	52.0
+386	386.0
+467	467.0
+468	1872.0
+218	218.0
+51	102.0
+163	163.0
+315	315.0
+150	150.0
+273	819.0
+366	366.0
+226	226.0
+342	684.0
+74	74.0
+345	345.0
+424	848.0
+491	491.0
+431	1293.0
+395	790.0
+302	302.0
+149	298.0
+105	105.0
+167	501.0
+172	344.0
+492	984.0
+239	478.0
+69	69.0
+66	66.0
+70	210.0
+2	2.0
+449	449.0
+179	358.0
+458	916.0
+202	202.0
+356	356.0
+430	1290.0
+466	1398.0
+478	956.0
+338	338.0
+453	453.0
+493	493.0
+365	365.0
+379	379.0
+353	706.0
+247	247.0
+200	400.0
+283	283.0
+158	158.0
+485	485.0
+103	206.0
+275	275.0
+138	552.0
+452	452.0
+217	434.0
+325	650.0
+274	274.0
+197	394.0
+351	351.0
+336	336.0
+399	798.0
+24	48.0
+133	133.0
+305	305.0
+367	734.0
+157	157.0
+262	262.0
+128	384.0
+414	828.0
+116	116.0
+437	437.0
+156	156.0
+479	479.0
+120	240.0
+153	153.0
+407	407.0
+80	80.0
+411	411.0
+84	168.0
+322	644.0
+44	44.0
+443	443.0
+187	561.0
+341	341.0
+238	476.0
+255	510.0
+8	8.0
+143	143.0
+213	426.0
+126	126.0
+219	438.0
+318	954.0
+169	676.0
+15	30.0
+92	92.0
+33	33.0
+235	235.0
+114	114.0
+145	145.0
+180	180.0
+191	382.0
+444	444.0
+496	496.0
+432	432.0
+344	688.0
+470	470.0
+483	483.0
+495	495.0
+339	339.0
+35	105.0
+18	36.0
+373	373.0
+152	304.0
+348	1740.0
+462	924.0
+316	948.0
+494	494.0
+229	458.0
+331	662.0
+216	432.0
+72	144.0
+90	270.0
+164	328.0
+97	194.0
+175	350.0
+119	357.0
+190	190.0
+0	0.0
+192	192.0
+489	1956.0
+65	65.0
+228	228.0
+306	306.0
+469	2345.0
+77	77.0
+282	564.0
+17	17.0
+137	274.0
+266	266.0
+42	84.0
+205	410.0
+257	257.0
+186	186.0
+136	136.0
+265	530.0
+439	878.0
+86	86.0
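
Worth noticing in the EXPLAIN above: Map 1 partitions its output on rand() rather than on the group key, and the sum is then computed in two reducer stages (mode: partial1 in Reducer 2, mode: final in Reducer 3). That is the skew-resistant group-by shape Hive emits, normally triggered by hive.groupby.skewindata (the setting itself is not echoed in this output, so attributing the plan to it is an assumption). A sketch of the toggle:

    -- Assumed: the two-reducer plan above corresponds to skew-resistant
    -- aggregation; with the flag off, the same query compiles to a
    -- single reduce-side group-by.
    set hive.groupby.skewindata=true;
    EXPLAIN
    FROM src INSERT OVERWRITE TABLE dest_g1
    SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key;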

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby2.q.out?rev=1619924&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby2.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby2.q.out Fri Aug 22 20:34:37 2014
@@ -0,0 +1,112 @@
+PREHOOK: query: CREATE TABLE dest_g2(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest_g2
+POSTHOOK: query: CREATE TABLE dest_g2(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest_g2
+PREHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP SORT)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: key, value
+                    Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: substr(key, 1, 1) (type: string), substr(value, 5) (type: string)
+                      sort order: ++
+                      Map-reduce partition columns: substr(key, 1, 1) (type: string)
+                      Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(DISTINCT KEY._col1:0._col0), sum(KEY._col1:0._col0)
+                keys: KEY._col0 (type: string)
+                mode: complete
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: string), UDFToInteger(_col1) (type: int), concat(_col0, _col2) (type: string)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.dest_g2
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest_g2
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest_g2
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest_g2
+POSTHOOK: Lineage: dest_g2.c1 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_g2.c2 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_g2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: SELECT dest_g2.* FROM dest_g2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest_g2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dest_g2.* FROM dest_g2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest_g2
+#### A masked pattern was here ####
+0	1	00.0
+1	71	116414.0
+2	69	225571.0
+3	62	332004.0
+4	74	452763.0
+5	6	5397.0
+6	5	6398.0
+7	6	7735.0
+8	8	8762.0
+9	7	91047.0
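
count(DISTINCT) leaves a distinctive footprint in the plan above: Map 1 emits both substr(key,1,1) and substr(value,5) as sort keys (sort order: ++) but partitions only on the first, so each reducer receives one group's values already sorted and can count the distinct ones in a single streaming pass. For the distinct-count column, this is semantically the de-duplicate-then-count rewrite (sketch over the same src table):

    -- Equivalent formulation of the count(DISTINCT ...) column above:
    SELECT k, count(*) AS c1
    FROM (SELECT DISTINCT substr(key,1,1) AS k, substr(value,5) AS v
          FROM src) t
    GROUP BY k;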

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby3.q.out?rev=1619924&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby3.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby3.q.out Fri Aug 22 20:34:37 2014
@@ -0,0 +1,156 @@
+PREHOOK: query: CREATE TABLE dest1(c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 DOUBLE, c6 DOUBLE, c7 DOUBLE, c8 DOUBLE, c9 DOUBLE) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1
+POSTHOOK: query: CREATE TABLE dest1(c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 DOUBLE, c6 DOUBLE, c7 DOUBLE, c8 DOUBLE, c9 DOUBLE) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT 
+  sum(substr(src.value,5)), 
+  avg(substr(src.value,5)), 
+  avg(DISTINCT substr(src.value,5)), 
+  max(substr(src.value,5)),
+  min(substr(src.value,5)), 
+  std(substr(src.value,5)),
+  stddev_samp(substr(src.value,5)),
+  variance(substr(src.value,5)),
+  var_samp(substr(src.value,5))
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT 
+  sum(substr(src.value,5)), 
+  avg(substr(src.value,5)), 
+  avg(DISTINCT substr(src.value,5)), 
+  max(substr(src.value,5)),
+  min(substr(src.value,5)), 
+  std(substr(src.value,5)),
+  stddev_samp(substr(src.value,5)),
+  variance(substr(src.value,5)),
+  var_samp(substr(src.value,5))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP SORT)
+        Reducer 3 <- Reducer 2 (GROUP)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: value (type: string)
+                    outputColumnNames: value
+                    Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: substr(value, 5) (type: string)
+                      sort order: +
+                      Map-reduce partition columns: substr(value, 5) (type: string)
+                      Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(KEY._col0:0._col0), avg(KEY._col0:0._col0), avg(DISTINCT KEY._col0:0._col0), max(KEY._col0:0._col0), min(KEY._col0:0._col0), std(KEY._col0:0._col0), stddev_samp(KEY._col0:0._col0), variance(KEY._col0:0._col0), var_samp(KEY._col0:0._col0)
+                mode: partial1
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
+                Statistics: Num rows: 1 Data size: 176 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 176 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: double), _col1 (type: struct<count:bigint,sum:double,input:string>), _col2 (type: struct<count:bigint,sum:double,input:string>), _col3 (type: string), _col4 (type: string), _col5 (type: struct<count:bigint,sum:double,variance:double>), _col6 (type: struct<count:bigint,sum:double,variance:double>), _col7 (type: struct<count:bigint,sum:double,variance:double>), _col8 (type: struct<count:bigint,sum:double,variance:double>)
+        Reducer 3 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2), max(VALUE._col3), min(VALUE._col4), std(VALUE._col5), stddev_samp(VALUE._col6), variance(VALUE._col7), var_samp(VALUE._col8)
+                mode: final
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
+                Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: double), _col1 (type: double), _col2 (type: double), UDFToDouble(_col3) (type: double), UDFToDouble(_col4) (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: double)
+                  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
+                  Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.dest1
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT 
+  sum(substr(src.value,5)), 
+  avg(substr(src.value,5)), 
+  avg(DISTINCT substr(src.value,5)), 
+  max(substr(src.value,5)), 
+  min(substr(src.value,5)), 
+  std(substr(src.value,5)),
+  stddev_samp(substr(src.value,5)),
+  variance(substr(src.value,5)),
+  var_samp(substr(src.value,5))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT 
+  sum(substr(src.value,5)), 
+  avg(substr(src.value,5)), 
+  avg(DISTINCT substr(src.value,5)), 
+  max(substr(src.value,5)), 
+  min(substr(src.value,5)), 
+  std(substr(src.value,5)),
+  stddev_samp(substr(src.value,5)),
+  variance(substr(src.value,5)),
+  var_samp(substr(src.value,5))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c1 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c3 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c4 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c5 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c6 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c7 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c8 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c9 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+130091.0	260.182	256.10355987055016	98.0	0.0	142.92680950752379	143.06995106518903	20428.07287599999	20469.010897795582
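
The value expressions in Reducer 2 show why avg and the variance family carry struct-typed partial buffers (count/sum for avg, count/sum/variance for the deviation functions): Reducer 3 merges these partial states rather than re-reading the rows. The single result row is also internally consistent: std computes the population deviation and stddev_samp the sample one, so the two printed values should differ by exactly a sqrt(n/(n-1)) factor (n = 500 rows in the standard src fixture, an assumption about the test data). A sketch of that check:

    -- Hypothetical consistency check; 142.9268... * sqrt(500/499)
    -- should reproduce the printed stddev_samp of 143.0699...
    SELECT std(substr(value,5))         AS pop_dev,
           stddev_samp(substr(value,5)) AS samp_dev,
           std(substr(value,5)) * sqrt(500.0/499.0) AS samp_dev_check
    FROM src;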

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/having.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/having.q.out?rev=1619924&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/having.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/having.q.out Fri Aug 22 20:34:37 2014
@@ -0,0 +1,1292 @@
+PREHOOK: query: EXPLAIN SELECT count(value) AS c FROM src GROUP BY key HAVING c > 3
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT count(value) AS c FROM src GROUP BY key HAVING c > 3
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: key, value
+                    Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: count(value)
+                      keys: key (type: string)
+                      mode: hash
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col1 (type: bigint)
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+                Filter Operator
+                  predicate: (_col1 > 3) (type: boolean)
+                  Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col1 (type: bigint)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT count(value) AS c FROM src GROUP BY key HAVING c > 3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT count(value) AS c FROM src GROUP BY key HAVING c > 3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+4
+5
+4
+5
+4
+4
+4
+5
+4
+5
+PREHOOK: query: EXPLAIN SELECT key, max(value) AS c FROM src GROUP BY key HAVING key != 302
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT key, max(value) AS c FROM src GROUP BY key HAVING key != 302
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (key <> 302) (type: boolean)
+                    Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: key, value
+                      Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                      Group By Operator
+                        aggregations: max(value)
+                        keys: key (type: string)
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: string)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: string)
+                          Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                          value expressions: _col1 (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: max(VALUE._col0)
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: string), _col1 (type: string)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT key, max(value) AS c FROM src GROUP BY key HAVING key != 302
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT key, max(value) AS c FROM src GROUP BY key HAVING key != 302
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+168	val_168
+436	val_436
+170	val_170
+364	val_364
+209	val_209
+403	val_403
+374	val_374
+11	val_11
+195	val_195
+252	val_252
+146	val_146
+95	val_95
+118	val_118
+189	val_189
+199	val_199
+196	val_196
+100	val_100
+382	val_382
+30	val_30
+455	val_455
+498	val_498
+111	val_111
+287	val_287
+248	val_248
+19	val_19
+311	val_311
+76	val_76
+178	val_178
+160	val_160
+221	val_221
+87	val_87
+406	val_406
+125	val_125
+463	val_463
+459	val_459
+263	val_263
+166	val_166
+335	val_335
+28	val_28
+223	val_223
+174	val_174
+98	val_98
+208	val_208
+4	val_4
+404	val_404
+47	val_47
+54	val_54
+368	val_368
+377	val_377
+37	val_37
+280	val_280
+57	val_57
+203	val_203
+308	val_308
+291	val_291
+278	val_278
+484	val_484
+409	val_409
+155	val_155
+317	val_317
+475	val_475
+260	val_260
+429	val_429
+417	val_417
+284	val_284
+413	val_413
+309	val_309
+27	val_27
+296	val_296
+67	val_67
+244	val_244
+230	val_230
+96	val_96
+183	val_183
+249	val_249
+289	val_289
+427	val_427
+418	val_418
+181	val_181
+472	val_472
+454	val_454
+310	val_310
+207	val_207
+233	val_233
+194	val_194
+224	val_224
+129	val_129
+360	val_360
+10	val_10
+460	val_460
+5	val_5
+12	val_12
+481	val_481
+85	val_85
+58	val_58
+369	val_369
+482	val_482
+214	val_214
+177	val_177
+193	val_193
+9	val_9
+34	val_34
+419	val_419
+378	val_378
+165	val_165
+201	val_201
+241	val_241
+281	val_281
+82	val_82
+292	val_292
+285	val_285
+400	val_400
+53	val_53
+298	val_298
+397	val_397
+43	val_43
+272	val_272
+332	val_332
+321	val_321
+389	val_389
+131	val_131
+286	val_286
+242	val_242
+421	val_421
+487	val_487
+78	val_78
+396	val_396
+375	val_375
+83	val_83
+258	val_258
+307	val_307
+41	val_41
+392	val_392
+277	val_277
+327	val_327
+490	val_490
+134	val_134
+333	val_333
+64	val_64
+477	val_477
+256	val_256
+401	val_401
+402	val_402
+435	val_435
+393	val_393
+362	val_362
+492	val_492
+288	val_288
+438	val_438
+104	val_104
+113	val_113
+176	val_176
+448	val_448
+394	val_394
+457	val_457
+162	val_162
+480	val_480
+384	val_384
+323	val_323
+497	val_497
+20	val_20
+446	val_446
+237	val_237
+26	val_26
+222	val_222
+386	val_386
+467	val_467
+468	val_468
+218	val_218
+51	val_51
+163	val_163
+315	val_315
+150	val_150
+273	val_273
+366	val_366
+226	val_226
+74	val_74
+345	val_345
+424	val_424
+491	val_491
+431	val_431
+395	val_395
+342	val_342
+149	val_149
+105	val_105
+167	val_167
+69	val_69
+172	val_172
+66	val_66
+70	val_70
+2	val_2
+239	val_239
+449	val_449
+179	val_179
+458	val_458
+202	val_202
+356	val_356
+430	val_430
+466	val_466
+478	val_478
+453	val_453
+493	val_493
+338	val_338
+365	val_365
+379	val_379
+353	val_353
+247	val_247
+283	val_283
+200	val_200
+158	val_158
+485	val_485
+103	val_103
+275	val_275
+138	val_138
+452	val_452
+217	val_217
+351	val_351
+325	val_325
+197	val_197
+414	val_414
+84	val_84
+399	val_399
+24	val_24
+133	val_133
+305	val_305
+367	val_367
+157	val_157
+262	val_262
+128	val_128
+116	val_116
+437	val_437
+156	val_156
+479	val_479
+153	val_153
+80	val_80
+407	val_407
+120	val_120
+411	val_411
+336	val_336
+322	val_322
+44	val_44
+443	val_443
+187	val_187
+341	val_341
+238	val_238
+255	val_255
+274	val_274
+8	val_8
+213	val_213
+235	val_235
+143	val_143
+126	val_126
+219	val_219
+318	val_318
+169	val_169
+92	val_92
+15	val_15
+33	val_33
+114	val_114
+496	val_496
+180	val_180
+191	val_191
+145	val_145
+444	val_444
+432	val_432
+344	val_344
+470	val_470
+483	val_483
+495	val_495
+339	val_339
+35	val_35
+373	val_373
+18	val_18
+152	val_152
+348	val_348
+72	val_72
+316	val_316
+462	val_462
+97	val_97
+229	val_229
+494	val_494
+331	val_331
+90	val_90
+216	val_216
+164	val_164
+175	val_175
+119	val_119
+190	val_190
+0	val_0
+192	val_192
+65	val_65
+489	val_489
+228	val_228
+306	val_306
+469	val_469
+77	val_77
+282	val_282
+17	val_17
+137	val_137
+266	val_266
+42	val_42
+205	val_205
+257	val_257
+186	val_186
+136	val_136
+265	val_265
+439	val_439
+86	val_86
+PREHOOK: query: EXPLAIN SELECT key FROM src GROUP BY key HAVING max(value) > "val_255"
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT key FROM src GROUP BY key HAVING max(value) > "val_255"
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: key, value
+                    Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: max(value)
+                      keys: key (type: string)
+                      mode: hash
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col1 (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: max(VALUE._col0)
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+                Filter Operator
+                  predicate: (_col1 > 'val_255') (type: boolean)
+                  Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: string)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT key FROM src GROUP BY key HAVING max(value) > "val_255"
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT key FROM src GROUP BY key HAVING max(value) > "val_255"
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+436
+364
+403
+374
+95
+382
+30
+455
+498
+287
+311
+76
+87
+406
+463
+459
+263
+335
+28
+98
+4
+404
+47
+54
+368
+377
+37
+280
+57
+308
+291
+278
+484
+409
+317
+475
+260
+429
+417
+284
+413
+309
+27
+296
+67
+96
+289
+427
+418
+472
+454
+310
+360
+460
+5
+481
+85
+58
+369
+482
+9
+34
+419
+378
+281
+82
+292
+285
+400
+53
+298
+397
+43
+272
+332
+321
+389
+286
+421
+487
+78
+396
+375
+83
+258
+307
+41
+392
+277
+327
+490
+333
+64
+477
+256
+401
+402
+435
+393
+362
+492
+288
+438
+448
+394
+457
+480
+384
+323
+497
+446
+26
+386
+467
+468
+51
+315
+273
+366
+74
+345
+424
+491
+431
+395
+342
+302
+69
+66
+70
+449
+458
+356
+430
+466
+478
+453
+493
+338
+365
+379
+353
+283
+485
+275
+452
+351
+325
+414
+84
+399
+305
+367
+262
+437
+479
+80
+407
+411
+336
+322
+44
+443
+341
+274
+8
+318
+92
+33
+496
+444
+432
+344
+470
+483
+495
+339
+35
+373
+348
+72
+316
+462
+97
+494
+331
+90
+65
+489
+306
+469
+77
+282
+266
+42
+257
+265
+439
+86
+PREHOOK: query: EXPLAIN SELECT key FROM src where key > 300 GROUP BY key HAVING max(value) > "val_255"
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT key FROM src where key > 300 GROUP BY key HAVING max(value) > "val_255"
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (key > 300) (type: boolean)
+                    Statistics: Num rows: 9 Data size: 1803 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: key, value
+                      Statistics: Num rows: 9 Data size: 1803 Basic stats: COMPLETE Column stats: NONE
+                      Group By Operator
+                        aggregations: max(value)
+                        keys: key (type: string)
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 9 Data size: 1803 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: string)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: string)
+                          Statistics: Num rows: 9 Data size: 1803 Basic stats: COMPLETE Column stats: NONE
+                          value expressions: _col1 (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: max(VALUE._col0)
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE
+                Filter Operator
+                  predicate: (_col1 > 'val_255') (type: boolean)
+                  Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: string)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT key FROM src where key > 300 GROUP BY key HAVING max(value) > "val_255"
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT key FROM src where key > 300 GROUP BY key HAVING max(value) > "val_255"
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+364
+436
+438
+448
+403
+394
+374
+480
+384
+323
+497
+446
+457
+468
+467
+386
+315
+382
+455
+366
+498
+345
+342
+424
+491
+311
+395
+431
+302
+406
+463
+459
+335
+449
+458
+356
+404
+430
+377
+368
+466
+478
+453
+493
+338
+308
+365
+379
+353
+485
+452
+484
+409
+351
+325
+317
+475
+429
+414
+417
+413
+309
+399
+305
+367
+336
+479
+437
+407
+411
+427
+418
+443
+322
+341
+472
+454
+310
+360
+460
+318
+481
+496
+369
+482
+444
+432
+344
+470
+483
+495
+419
+378
+339
+373
+348
+400
+316
+462
+494
+397
+332
+331
+321
+389
+333
+477
+396
+487
+421
+375
+489
+327
+307
+392
+306
+469
+490
+439
+435
+401
+402
+393
+362
+492
+PREHOOK: query: EXPLAIN SELECT key, max(value) FROM src GROUP BY key HAVING max(value) > "val_255"
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT key, max(value) FROM src GROUP BY key HAVING max(value) > "val_255"
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: key, value
+                    Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: max(value)
+                      keys: key (type: string)
+                      mode: hash
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col1 (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: max(VALUE._col0)
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+                Filter Operator
+                  predicate: (_col1 > 'val_255') (type: boolean)
+                  Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: string), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT key, max(value) FROM src GROUP BY key HAVING max(value) > "val_255"
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT key, max(value) FROM src GROUP BY key HAVING max(value) > "val_255"
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+436	val_436
+364	val_364
+403	val_403
+374	val_374
+95	val_95
+382	val_382
+30	val_30
+455	val_455
+498	val_498
+287	val_287
+311	val_311
+76	val_76
+87	val_87
+406	val_406
+463	val_463
+459	val_459
+263	val_263
+335	val_335
+28	val_28
+98	val_98
+4	val_4
+404	val_404
+47	val_47
+54	val_54
+368	val_368
+377	val_377
+37	val_37
+280	val_280
+57	val_57
+308	val_308
+291	val_291
+278	val_278
+484	val_484
+409	val_409
+317	val_317
+475	val_475
+260	val_260
+429	val_429
+417	val_417
+284	val_284
+413	val_413
+309	val_309
+27	val_27
+296	val_296
+67	val_67
+96	val_96
+289	val_289
+427	val_427
+418	val_418
+472	val_472
+454	val_454
+310	val_310
+360	val_360
+460	val_460
+5	val_5
+481	val_481
+85	val_85
+58	val_58
+369	val_369
+482	val_482
+9	val_9
+34	val_34
+419	val_419
+378	val_378
+281	val_281
+82	val_82
+292	val_292
+285	val_285
+400	val_400
+53	val_53
+298	val_298
+397	val_397
+43	val_43
+272	val_272
+332	val_332
+321	val_321
+389	val_389
+286	val_286
+421	val_421
+487	val_487
+78	val_78
+396	val_396
+375	val_375
+83	val_83
+258	val_258
+307	val_307
+41	val_41
+392	val_392
+277	val_277
+327	val_327
+490	val_490
+333	val_333
+64	val_64
+477	val_477
+256	val_256
+401	val_401
+402	val_402
+435	val_435
+393	val_393
+362	val_362
+492	val_492
+288	val_288
+438	val_438
+448	val_448
+394	val_394
+457	val_457
+480	val_480
+384	val_384
+323	val_323
+497	val_497
+446	val_446
+26	val_26
+386	val_386
+467	val_467
+468	val_468
+51	val_51
+315	val_315
+273	val_273
+366	val_366
+74	val_74
+345	val_345
+424	val_424
+491	val_491
+431	val_431
+395	val_395
+342	val_342
+302	val_302
+69	val_69
+66	val_66
+70	val_70
+449	val_449
+458	val_458
+356	val_356
+430	val_430
+466	val_466
+478	val_478
+453	val_453
+493	val_493
+338	val_338
+365	val_365
+379	val_379
+353	val_353
+283	val_283
+485	val_485
+275	val_275
+452	val_452
+351	val_351
+325	val_325
+414	val_414
+84	val_84
+399	val_399
+305	val_305
+367	val_367
+262	val_262
+437	val_437
+479	val_479
+80	val_80
+407	val_407
+411	val_411
+336	val_336
+322	val_322
+44	val_44
+443	val_443
+341	val_341
+274	val_274
+8	val_8
+318	val_318
+92	val_92
+33	val_33
+496	val_496
+444	val_444
+432	val_432
+344	val_344
+470	val_470
+483	val_483
+495	val_495
+339	val_339
+35	val_35
+373	val_373
+348	val_348
+72	val_72
+316	val_316
+462	val_462
+97	val_97
+494	val_494
+331	val_331
+90	val_90
+65	val_65
+489	val_489
+306	val_306
+469	val_469
+77	val_77
+282	val_282
+266	val_266
+42	val_42
+257	val_257
+265	val_265
+439	val_439
+86	val_86
+PREHOOK: query: EXPLAIN SELECT key, COUNT(value) FROM src GROUP BY key HAVING count(value) >= 4
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT key, COUNT(value) FROM src GROUP BY key HAVING count(value) >= 4
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: key, value
+                    Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: count(value)
+                      keys: key (type: string)
+                      mode: hash
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col1 (type: bigint)
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+                Filter Operator
+                  predicate: (_col1 >= 4) (type: boolean)
+                  Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: string), _col1 (type: bigint)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT key, COUNT(value) FROM src GROUP BY key HAVING count(value) >= 4
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT key, COUNT(value) FROM src GROUP BY key HAVING count(value) >= 4
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+406	4
+230	5
+277	4
+401	5
+468	4
+138	4
+169	4
+348	5
+489	4
+469	5
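
Note: the plan above compiles the HAVING clause into a Filter Operator (predicate: (_col1 >= 4)) that runs in Reducer 2, after the mergepartial Group By, rather than during the map-side scan. As a minimal sketch, the same result can be written as a subquery plus WHERE (the alias t and column name cnt are illustrative, not part of the test); Hive should plan this shape with the same Group By followed by a Filter:

    SELECT t.key, t.cnt
    FROM (SELECT key, COUNT(value) AS cnt
          FROM src
          GROUP BY key) t
    WHERE t.cnt >= 4;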

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/insert1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/insert1.q.out?rev=1619924&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/insert1.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/insert1.q.out Fri Aug 22 20:34:37 2014
@@ -0,0 +1,447 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+create table insert1(key int, value string) stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@insert1
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+create table insert1(key int, value string) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@insert1
+PREHOOK: query: create table insert2(key int, value string) stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@insert2
+POSTHOOK: query: create table insert2(key int, value string) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@insert2
+PREHOOK: query: insert overwrite table insert1 select a.key, a.value from insert2 a WHERE (a.key=-1)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert2
+PREHOOK: Output: default@insert1
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: insert overwrite table insert1 select a.key, a.value from insert2 a WHERE (a.key=-1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert2
+POSTHOOK: Output: default@insert1
+POSTHOOK: Lineage: insert1.key SIMPLE [(insert2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert1.value SIMPLE [(insert2)a.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: explain insert into table insert1 select a.key, a.value from insert2 a WHERE (a.key=-1)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain insert into table insert1 select a.key, a.value from insert2 a WHERE (a.key=-1)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: a
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Filter Operator
+                    predicate: (key = -1) (type: boolean)
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                    Select Operator
+                      expressions: -1 (type: int), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: default.insert1
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert1
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
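Note: the Move Operator in Stage-0 above carries replace: false because this is an INSERT INTO; an INSERT OVERWRITE commits with replace: true instead (compare the multi-insert plan further down in this file). The [Error 30017] lines recorded for the executed inserts indicate only that counter-based stats aggregation was skipped; the writes themselves succeed, as the POSTHOOK lineage entries show. A minimal sketch of the two write modes on the test's own tables:

    -- appends rows to insert1 (Move Operator: replace: false)
    INSERT INTO TABLE insert1
    SELECT a.key, a.value FROM insert2 a WHERE (a.key = -1);

    -- replaces the contents of insert1 (Move Operator: replace: true)
    INSERT OVERWRITE TABLE insert1
    SELECT a.key, a.value FROM insert2 a WHERE (a.key = -1);
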
+PREHOOK: query: explain insert into table INSERT1 select a.key, a.value from insert2 a WHERE (a.key=-1)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain insert into table INSERT1 select a.key, a.value from insert2 a WHERE (a.key=-1)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: a
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Filter Operator
+                    predicate: (key = -1) (type: boolean)
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                    Select Operator
+                      expressions: -1 (type: int), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: default.insert1
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert1
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+PREHOOK: query: -- HIVE-3465
+create database x
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:x
+POSTHOOK: query: -- HIVE-3465
+create database x
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:x
+PREHOOK: query: create table x.insert1(key int, value string) stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:x
+PREHOOK: Output: x@x.insert1
+POSTHOOK: query: create table x.insert1(key int, value string) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:x
+POSTHOOK: Output: x@insert1
+POSTHOOK: Output: x@x.insert1
+PREHOOK: query: explain insert into table x.INSERT1 select a.key, a.value from insert2 a WHERE (a.key=-1)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain insert into table x.INSERT1 select a.key, a.value from insert2 a WHERE (a.key=-1)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: a
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Filter Operator
+                    predicate: (key = -1) (type: boolean)
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                    Select Operator
+                      expressions: -1 (type: int), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: x.insert1
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: x.insert1
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+PREHOOK: query: explain insert into table default.INSERT1 select a.key, a.value from insert2 a WHERE (a.key=-1)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain insert into table default.INSERT1 select a.key, a.value from insert2 a WHERE (a.key=-1)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: a
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Filter Operator
+                    predicate: (key = -1) (type: boolean)
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                    Select Operator
+                      expressions: -1 (type: int), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: default.insert1
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert1
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
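Note: the three EXPLAIN outputs above — for insert1, INSERT1, and default.INSERT1 — are identical, which is the point of the test: table identifiers are case-insensitive, and an unqualified name resolves against the current database, so all three statements name the same target (the x.INSERT1 variant differs only in writing to x.insert1 in its FileSink and Move stages). Juxtaposed for comparison, the three equivalent statements are:

    EXPLAIN INSERT INTO TABLE insert1         SELECT a.key, a.value FROM insert2 a WHERE (a.key=-1);
    EXPLAIN INSERT INTO TABLE INSERT1         SELECT a.key, a.value FROM insert2 a WHERE (a.key=-1);
    EXPLAIN INSERT INTO TABLE default.INSERT1 SELECT a.key, a.value FROM insert2 a WHERE (a.key=-1);
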
+PREHOOK: query: explain
+from insert2
+insert into table insert1 select * where key < 10
+insert overwrite table x.insert1 select * where key > 10 and key < 20
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+from insert2
+insert into table insert1 select * where key < 10
+insert overwrite table x.insert1 select * where key > 10 and key < 20
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-3 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-3
+  Stage-4 depends on stages: Stage-0
+  Stage-1 depends on stages: Stage-3
+  Stage-5 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: insert2
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Filter Operator
+                    predicate: (key < 10) (type: boolean)
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: int), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: default.insert1
+                  Filter Operator
+                    predicate: ((key > 10) and (key < 20)) (type: boolean)
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: int), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: x.insert1
+
+  Stage: Stage-3
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert1
+
+  Stage: Stage-4
+    Stats-Aggr Operator
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: x.insert1
+
+  Stage: Stage-5
+    Stats-Aggr Operator
+
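Note: in the multi-insert plan above, a single Spark vertex (Map 1) scans insert2 once and fans out into two Filter/Select/File Output branches, one per target table; Stage-0 then commits the INSERT INTO branch with replace: false while Stage-1 commits the OVERWRITE branch with replace: true. The statement being planned, repeated here as a compact sketch of Hive's multi-insert syntax (one scan, multiple sinks):

    FROM insert2
    INSERT INTO TABLE insert1        SELECT * WHERE key < 10
    INSERT OVERWRITE TABLE x.insert1 SELECT * WHERE key > 10 AND key < 20;
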
+PREHOOK: query: -- HIVE-3676
+CREATE DATABASE db2
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:db2
+POSTHOOK: query: -- HIVE-3676
+CREATE DATABASE db2
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:db2
+PREHOOK: query: USE db2
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:db2
+POSTHOOK: query: USE db2
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:db2
+PREHOOK: query: CREATE TABLE result(col1 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:db2
+PREHOOK: Output: db2@result
+POSTHOOK: query: CREATE TABLE result(col1 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:db2
+POSTHOOK: Output: db2@result
+PREHOOK: query: INSERT OVERWRITE TABLE result SELECT 'db2_insert1' FROM default.src LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: db2@result
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: INSERT OVERWRITE TABLE result SELECT 'db2_insert1' FROM default.src LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: db2@result
+POSTHOOK: Lineage: result.col1 SIMPLE []
+PREHOOK: query: INSERT INTO TABLE result SELECT 'db2_insert2' FROM default.src LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: db2@result
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: INSERT INTO TABLE result SELECT 'db2_insert2' FROM default.src LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: db2@result
+POSTHOOK: Lineage: result.col1 SIMPLE []
+PREHOOK: query: SELECT * FROM result
+PREHOOK: type: QUERY
+PREHOOK: Input: db2@result
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM result
+POSTHOOK: type: QUERY
+POSTHOOK: Input: db2@result
+#### A masked pattern was here ####
+db2_insert1
+db2_insert2
+PREHOOK: query: USE default
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:default
+POSTHOOK: query: USE default
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:default
+PREHOOK: query: CREATE DATABASE db1
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:db1
+POSTHOOK: query: CREATE DATABASE db1
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:db1
+PREHOOK: query: CREATE TABLE db1.result(col1 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:db1
+PREHOOK: Output: db1@db1.result
+POSTHOOK: query: CREATE TABLE db1.result(col1 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:db1
+POSTHOOK: Output: db1@db1.result
+POSTHOOK: Output: db1@result
+PREHOOK: query: INSERT OVERWRITE TABLE db1.result SELECT 'db1_insert1' FROM src LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: db1@result
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: INSERT OVERWRITE TABLE db1.result SELECT 'db1_insert1' FROM src LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: db1@result
+POSTHOOK: Lineage: result.col1 SIMPLE []
+PREHOOK: query: INSERT INTO TABLE db1.result SELECT 'db1_insert2' FROM src LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: db1@result
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: INSERT INTO TABLE db1.result SELECT 'db1_insert2' FROM src LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: db1@result
+POSTHOOK: Lineage: result.col1 SIMPLE []
+PREHOOK: query: SELECT * FROM db1.result
+PREHOOK: type: QUERY
+PREHOOK: Input: db1@result
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM db1.result
+POSTHOOK: type: QUERY
+POSTHOOK: Input: db1@result
+#### A masked pattern was here ####
+db1_insert1
+db1_insert2
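
Note: the db1 section above exercises the HIVE-3676 fix — writing into another database without switching to it: the session stays in default and the target is addressed with a db.table-qualified name. A minimal sketch of the pattern, using the test's own objects:

    USE default;
    INSERT OVERWRITE TABLE db1.result SELECT 'db1_insert1' FROM src LIMIT 1;
    INSERT INTO TABLE db1.result SELECT 'db1_insert2' FROM src LIMIT 1;
    SELECT * FROM db1.result;   -- returns db1_insert1, db1_insert2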