Posted to commits@hive.apache.org by xu...@apache.org on 2014/09/24 05:16:26 UTC

svn commit: r1627210 [6/14] - in /hive/branches/spark: itests/src/test/resources/ ql/src/test/results/clientpositive/spark/

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/input13.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/input13.q.out?rev=1627210&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/input13.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/input13.q.out Wed Sep 24 03:16:25 2014
@@ -0,0 +1,768 @@
+PREHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1
+POSTHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest1
+PREHOOK: query: CREATE TABLE dest2(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest2
+POSTHOOK: query: CREATE TABLE dest2(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest2
+PREHOOK: query: CREATE TABLE dest3(key INT) PARTITIONED BY(ds STRING, hr STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest3
+POSTHOOK: query: CREATE TABLE dest3(key INT) PARTITIONED BY(ds STRING, hr STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest3
+PREHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
+INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200 and src.key < 300
+INSERT OVERWRITE DIRECTORY 'target/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
+INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200 and src.key < 300
+INSERT OVERWRITE DIRECTORY 'target/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-6 depends on stages: Stage-4
+  Stage-5 depends on stages: Stage-6, Stage-7, Stage-8, Stage-9
+  Stage-0 depends on stages: Stage-5
+  Stage-10 depends on stages: Stage-0
+  Stage-1 depends on stages: Stage-5
+  Stage-11 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-5
+  Stage-12 depends on stages: Stage-2
+  Stage-7 depends on stages: Stage-4
+  Stage-8 depends on stages: Stage-4
+  Stage-9 depends on stages: Stage-4
+  Stage-3 depends on stages: Stage-9
+
+STAGE PLANS:
+  Stage: Stage-4
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 5 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-6
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  Filter Operator
+                    predicate: (key < 100) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: UDFToInteger(key) (type: int), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: default.dest1
+
+  Stage: Stage-5
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1
+
+  Stage: Stage-10
+    Stats-Aggr Operator
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest2
+
+  Stage: Stage-11
+    Stats-Aggr Operator
+
+  Stage: Stage-2
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 12
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest3
+
+  Stage: Stage-12
+    Stats-Aggr Operator
+
+  Stage: Stage-7
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  Filter Operator
+                    predicate: ((key >= 100) and (key < 200)) (type: boolean)
+                    Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: UDFToInteger(key) (type: int), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: default.dest2
+
+  Stage: Stage-8
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  Filter Operator
+                    predicate: ((key >= 200) and (key < 300)) (type: boolean)
+                    Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: UDFToInteger(key) (type: int)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: default.dest3
+
+  Stage: Stage-9
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 4 
+            Map Operator Tree:
+                TableScan
+                  Filter Operator
+                    predicate: (key >= 300) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: value (type: string)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-3
+    Move Operator
+      files:
+          hdfs directory: true
+          destination: target/warehouse/dest4.out
+
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
+INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200 and src.key < 300
+INSERT OVERWRITE DIRECTORY 'target/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+PREHOOK: Output: default@dest2
+PREHOOK: Output: default@dest3@ds=2008-04-08/hr=12
+PREHOOK: Output: target/warehouse/dest4.out
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
+INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200 and src.key < 300
+INSERT OVERWRITE DIRECTORY 'target/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+POSTHOOK: Output: default@dest2
+POSTHOOK: Output: default@dest3@ds=2008-04-08/hr=12
+POSTHOOK: Output: target/warehouse/dest4.out
+POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest3 PARTITION(ds=2008-04-08,hr=12).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+86	val_86
+27	val_27
+98	val_98
+66	val_66
+37	val_37
+15	val_15
+82	val_82
+17	val_17
+0	val_0
+57	val_57
+20	val_20
+92	val_92
+47	val_47
+72	val_72
+4	val_4
+35	val_35
+54	val_54
+51	val_51
+65	val_65
+83	val_83
+12	val_12
+67	val_67
+84	val_84
+58	val_58
+8	val_8
+24	val_24
+42	val_42
+0	val_0
+96	val_96
+26	val_26
+51	val_51
+43	val_43
+95	val_95
+98	val_98
+85	val_85
+77	val_77
+0	val_0
+87	val_87
+15	val_15
+72	val_72
+90	val_90
+19	val_19
+10	val_10
+5	val_5
+58	val_58
+35	val_35
+95	val_95
+11	val_11
+34	val_34
+42	val_42
+78	val_78
+76	val_76
+41	val_41
+30	val_30
+64	val_64
+76	val_76
+74	val_74
+69	val_69
+33	val_33
+70	val_70
+5	val_5
+2	val_2
+35	val_35
+80	val_80
+44	val_44
+53	val_53
+90	val_90
+12	val_12
+5	val_5
+70	val_70
+24	val_24
+70	val_70
+83	val_83
+26	val_26
+67	val_67
+18	val_18
+9	val_9
+18	val_18
+97	val_97
+84	val_84
+28	val_28
+37	val_37
+90	val_90
+97	val_97
+PREHOOK: query: SELECT dest2.* FROM dest2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dest2.* FROM dest2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest2
+#### A masked pattern was here ####
+165	val_165
+193	val_193
+150	val_150
+128	val_128
+146	val_146
+152	val_152
+145	val_145
+166	val_166
+153	val_153
+193	val_193
+174	val_174
+199	val_199
+174	val_174
+162	val_162
+167	val_167
+195	val_195
+113	val_113
+155	val_155
+128	val_128
+149	val_149
+129	val_129
+170	val_170
+157	val_157
+111	val_111
+169	val_169
+125	val_125
+192	val_192
+187	val_187
+176	val_176
+138	val_138
+103	val_103
+176	val_176
+137	val_137
+180	val_180
+181	val_181
+138	val_138
+179	val_179
+172	val_172
+129	val_129
+158	val_158
+119	val_119
+197	val_197
+100	val_100
+199	val_199
+191	val_191
+165	val_165
+120	val_120
+131	val_131
+156	val_156
+196	val_196
+197	val_197
+187	val_187
+137	val_137
+169	val_169
+179	val_179
+118	val_118
+134	val_134
+138	val_138
+118	val_118
+177	val_177
+168	val_168
+143	val_143
+160	val_160
+195	val_195
+119	val_119
+149	val_149
+138	val_138
+103	val_103
+113	val_113
+167	val_167
+116	val_116
+191	val_191
+128	val_128
+193	val_193
+104	val_104
+175	val_175
+105	val_105
+190	val_190
+114	val_114
+164	val_164
+125	val_125
+164	val_164
+187	val_187
+104	val_104
+163	val_163
+119	val_119
+199	val_199
+120	val_120
+169	val_169
+178	val_178
+136	val_136
+172	val_172
+133	val_133
+175	val_175
+189	val_189
+134	val_134
+100	val_100
+146	val_146
+186	val_186
+167	val_167
+183	val_183
+152	val_152
+194	val_194
+126	val_126
+169	val_169
+PREHOOK: query: SELECT dest3.* FROM dest3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest3
+PREHOOK: Input: default@dest3@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dest3.* FROM dest3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest3
+POSTHOOK: Input: default@dest3@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+238	2008-04-08	12
+255	2008-04-08	12
+278	2008-04-08	12
+265	2008-04-08	12
+273	2008-04-08	12
+224	2008-04-08	12
+213	2008-04-08	12
+281	2008-04-08	12
+277	2008-04-08	12
+209	2008-04-08	12
+252	2008-04-08	12
+292	2008-04-08	12
+219	2008-04-08	12
+287	2008-04-08	12
+237	2008-04-08	12
+207	2008-04-08	12
+208	2008-04-08	12
+247	2008-04-08	12
+266	2008-04-08	12
+203	2008-04-08	12
+205	2008-04-08	12
+221	2008-04-08	12
+280	2008-04-08	12
+277	2008-04-08	12
+208	2008-04-08	12
+286	2008-04-08	12
+239	2008-04-08	12
+213	2008-04-08	12
+216	2008-04-08	12
+278	2008-04-08	12
+289	2008-04-08	12
+221	2008-04-08	12
+275	2008-04-08	12
+241	2008-04-08	12
+284	2008-04-08	12
+230	2008-04-08	12
+260	2008-04-08	12
+272	2008-04-08	12
+217	2008-04-08	12
+230	2008-04-08	12
+208	2008-04-08	12
+298	2008-04-08	12
+230	2008-04-08	12
+205	2008-04-08	12
+288	2008-04-08	12
+282	2008-04-08	12
+282	2008-04-08	12
+238	2008-04-08	12
+277	2008-04-08	12
+273	2008-04-08	12
+224	2008-04-08	12
+242	2008-04-08	12
+272	2008-04-08	12
+242	2008-04-08	12
+226	2008-04-08	12
+229	2008-04-08	12
+233	2008-04-08	12
+223	2008-04-08	12
+218	2008-04-08	12
+228	2008-04-08	12
+209	2008-04-08	12
+230	2008-04-08	12
+296	2008-04-08	12
+216	2008-04-08	12
+274	2008-04-08	12
+219	2008-04-08	12
+239	2008-04-08	12
+223	2008-04-08	12
+256	2008-04-08	12
+263	2008-04-08	12
+288	2008-04-08	12
+244	2008-04-08	12
+202	2008-04-08	12
+229	2008-04-08	12
+280	2008-04-08	12
+283	2008-04-08	12
+235	2008-04-08	12
+257	2008-04-08	12
+258	2008-04-08	12
+203	2008-04-08	12
+262	2008-04-08	12
+201	2008-04-08	12
+217	2008-04-08	12
+298	2008-04-08	12
+291	2008-04-08	12
+255	2008-04-08	12
+200	2008-04-08	12
+237	2008-04-08	12
+248	2008-04-08	12
+277	2008-04-08	12
+230	2008-04-08	12
+207	2008-04-08	12
+249	2008-04-08	12
+265	2008-04-08	12
+214	2008-04-08	12
+233	2008-04-08	12
+256	2008-04-08	12
+298	2008-04-08	12
+285	2008-04-08	12
+273	2008-04-08	12
+281	2008-04-08	12
+222	2008-04-08	12
+200	2008-04-08	12
+val_311
+val_409
+val_484
+val_401
+val_369
+val_406
+val_429
+val_374
+val_469
+val_495
+val_327
+val_403
+val_417
+val_430
+val_338
+val_446
+val_459
+val_394
+val_482
+val_413
+val_494
+val_466
+val_399
+val_396
+val_417
+val_489
+val_377
+val_397
+val_309
+val_365
+val_439
+val_342
+val_367
+val_325
+val_475
+val_339
+val_455
+val_311
+val_316
+val_302
+val_438
+val_345
+val_489
+val_378
+val_427
+val_356
+val_399
+val_382
+val_498
+val_386
+val_437
+val_469
+val_459
+val_430
+val_318
+val_332
+val_311
+val_333
+val_404
+val_384
+val_489
+val_353
+val_373
+val_348
+val_466
+val_411
+val_348
+val_463
+val_431
+val_496
+val_322
+val_468
+val_393
+val_454
+val_418
+val_327
+val_404
+val_436
+val_469
+val_468
+val_308
+val_481
+val_457
+val_318
+val_318
+val_409
+val_470
+val_369
+val_316
+val_413
+val_490
+val_364
+val_395
+val_419
+val_307
+val_435
+val_306
+val_309
+val_389
+val_327
+val_369
+val_392
+val_331
+val_401
+val_452
+val_497
+val_402
+val_396
+val_317
+val_395
+val_336
+val_472
+val_322
+val_498
+val_321
+val_430
+val_489
+val_458
+val_492
+val_449
+val_453
+val_468
+val_342
+val_368
+val_367
+val_344
+val_485
+val_487
+val_480
+val_401
+val_438
+val_467
+val_432
+val_316
+val_469
+val_463
+val_331
+val_321
+val_335
+val_466
+val_366
+val_403
+val_483
+val_406
+val_409
+val_406
+val_401
+val_348
+val_424
+val_396
+val_431
+val_454
+val_478
+val_431
+val_424
+val_382
+val_397
+val_480
+val_351
+val_438
+val_414
+val_491
+val_439
+val_360
+val_479
+val_305
+val_417
+val_444
+val_429
+val_443
+val_323
+val_325
+val_478
+val_468
+val_310
+val_317
+val_333
+val_493
+val_460
+val_480
+val_353
+val_462
+val_406
+val_454
+val_375
+val_401
+val_421
+val_407
+val_384
+val_384
+val_379
+val_462
+val_492
+val_341
+val_498
+val_458
+val_362
+val_348
+val_344
+val_469
+val_315
+val_448
+val_348
+val_307
+val_414
+val_477
+val_403
+val_400

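input13 exercises Hive's multi-insert form: a single FROM clause feeds four INSERT branches, so src is scanned once (Stage-4 materializes the scan as a SequenceFile) and each branch stage (Stage-6 through Stage-9) applies its own filter before writing to its destination. The UDFToInteger calls in the Select Operators are the implicit cast from src's STRING key to the INT key columns of the destination tables. A minimal sketch of the same pattern, assuming two hypothetical tables t1 and t2 shaped like dest1/dest2:

    -- One scan of src feeds both branches; Hive casts the STRING key
    -- to INT implicitly (the UDFToInteger seen in the plan).
    CREATE TABLE t1 (key INT, value STRING);
    CREATE TABLE t2 (key INT, value STRING);

    FROM src
    INSERT OVERWRITE TABLE t1 SELECT src.key, src.value WHERE src.key < 100
    INSERT OVERWRITE TABLE t2 SELECT src.key, src.value WHERE src.key >= 100;
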
Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/input14.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/input14.q.out?rev=1627210&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/input14.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/input14.q.out Wed Sep 24 03:16:25 2014
@@ -0,0 +1,210 @@
+PREHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1
+POSTHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
+FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value)
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value)
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP SORT)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                      Filter Operator
+                        predicate: (_col0 < 100) (type: boolean)
+                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: string)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: string)
+                          Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                          value expressions: _col0 (type: string), _col1 (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: UDFToInteger(VALUE._col0) (type: int), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.dest1
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+PREHOOK: query: FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value)
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value)
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+0	val_0
+0	val_0
+0	val_0
+10	val_10
+11	val_11
+12	val_12
+12	val_12
+15	val_15
+15	val_15
+17	val_17
+18	val_18
+18	val_18
+19	val_19
+2	val_2
+20	val_20
+24	val_24
+24	val_24
+26	val_26
+26	val_26
+27	val_27
+28	val_28
+30	val_30
+33	val_33
+34	val_34
+35	val_35
+35	val_35
+35	val_35
+37	val_37
+37	val_37
+4	val_4
+41	val_41
+42	val_42
+42	val_42
+43	val_43
+44	val_44
+47	val_47
+5	val_5
+5	val_5
+5	val_5
+51	val_51
+51	val_51
+53	val_53
+54	val_54
+57	val_57
+58	val_58
+58	val_58
+64	val_64
+65	val_65
+66	val_66
+67	val_67
+67	val_67
+69	val_69
+70	val_70
+70	val_70
+70	val_70
+72	val_72
+72	val_72
+74	val_74
+76	val_76
+76	val_76
+77	val_77
+78	val_78
+8	val_8
+80	val_80
+82	val_82
+83	val_83
+83	val_83
+84	val_84
+84	val_84
+85	val_85
+86	val_86
+87	val_87
+9	val_9
+90	val_90
+90	val_90
+90	val_90
+92	val_92
+95	val_95
+95	val_95
+96	val_96
+97	val_97
+97	val_97
+98	val_98
+98	val_98

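input14 streams rows through an external command with TRANSFORM ... USING 'cat' (the identity filter) and uses CLUSTER BY tkey to both distribute and sort by tkey before the insert; note that the plan pushes the tkey < 100 filter to the map side, ahead of the shuffle. The same shape works with a real script; a hedged sketch substituting a hypothetical stdin/stdout filter my_script.py (not part of this test) for cat:

    -- The script must read tab-separated rows on stdin and write
    -- tab-separated rows on stdout, one output row per line.
    ADD FILE my_script.py;

    FROM (
      FROM src
      SELECT TRANSFORM(src.key, src.value)
             USING 'python my_script.py' AS (tkey, tvalue)
      CLUSTER BY tkey
    ) tmap
    INSERT OVERWRITE TABLE dest1
    SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100;
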
Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/input17.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/input17.q.out?rev=1627210&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/input17.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/input17.q.out Wed Sep 24 03:16:25 2014
@@ -0,0 +1,134 @@
+PREHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1
+POSTHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
+FROM (
+  FROM src_thrift
+  SELECT TRANSFORM(src_thrift.aint + src_thrift.lint[0], src_thrift.lintstring[0])
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM (
+  FROM src_thrift
+  SELECT TRANSFORM(src_thrift.aint + src_thrift.lint[0], src_thrift.lintstring[0])
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP SORT)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src_thrift
+                  Statistics: Num rows: 11 Data size: 3070 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: (aint + lint[0]) (type: int), lintstring[0] (type: struct<myint:int,mystring:string,underscore_int:int>)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 11 Data size: 3070 Basic stats: COMPLETE Column stats: NONE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 11 Data size: 3070 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 11 Data size: 3070 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: string), _col1 (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: UDFToInteger(VALUE._col0) (type: int), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 11 Data size: 3070 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 11 Data size: 3070 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.dest1
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+PREHOOK: query: FROM (
+  FROM src_thrift
+  SELECT TRANSFORM(src_thrift.aint + src_thrift.lint[0], src_thrift.lintstring[0])
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_thrift
+PREHOOK: Output: default@dest1
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM (
+  FROM src_thrift
+  SELECT TRANSFORM(src_thrift.aint + src_thrift.lint[0], src_thrift.lintstring[0])
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_thrift
+POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.key SCRIPT [(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lint, type:array<int>, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lintstring, type:array<struct<myint:int,mystring:string,underscore_int:int>>, comment:from deserializer), ]
+POSTHOOK: Lineage: dest1.value SCRIPT [(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lint, type:array<int>, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lintstring, type:array<struct<myint:int,mystring:string,underscore_int:int>>, comment:from deserializer), ]
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+NULL	NULL
+-1461153966	{"myint":49,"mystring":"343","underscore_int":7}
+-1952710705	{"myint":25,"mystring":"125","underscore_int":5}
+-734328905	{"myint":16,"mystring":"64","underscore_int":4}
+-751827636	{"myint":4,"mystring":"8","underscore_int":2}
+1244525196	{"myint":36,"mystring":"216","underscore_int":6}
+1638581586	{"myint":64,"mystring":"512","underscore_int":8}
+1712634731	{"myint":0,"mystring":"0","underscore_int":0}
+336964422	{"myint":81,"mystring":"729","underscore_int":9}
+465985201	{"myint":1,"mystring":"1","underscore_int":1}
+477111225	{"myint":9,"mystring":"27","underscore_int":3}

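input17 pushes complex types through the transform: lint is an array<int> indexed with [0], and lintstring is an array<struct<myint,mystring,underscore_int>> whose struct element LazySimpleSerDe serializes as JSON text on its way through the script, which is why the result column above shows {"myint":...} strings. Struct fields can also be projected directly with dot notation, no transform needed; a small sketch against the same src_thrift table:

    -- Index into the arrays, then address struct fields with dots.
    SELECT src_thrift.lint[0]                AS first_int,
           src_thrift.lintstring[0].myint    AS my_int,
           src_thrift.lintstring[0].mystring AS my_string
    FROM src_thrift;
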
Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/input18.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/input18.q.out?rev=1627210&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/input18.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/input18.q.out Wed Sep 24 03:16:25 2014
@@ -0,0 +1,210 @@
+PREHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1
+POSTHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
+FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+         USING 'cat'
+  CLUSTER BY key
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+         USING 'cat'
+  CLUSTER BY key
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP SORT)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string), value (type: string), 3 (type: int), 7 (type: int)
+                    outputColumnNames: _col0, _col1, _col2, _col3
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                      Filter Operator
+                        predicate: (_col0 < 100) (type: boolean)
+                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: string)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: string)
+                          Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                          value expressions: _col0 (type: string), _col1 (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: UDFToInteger(VALUE._col0) (type: int), regexp_replace(VALUE._col1, '	', '+') (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.dest1
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+PREHOOK: query: FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+         USING 'cat'
+  CLUSTER BY key
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+         USING 'cat'
+  CLUSTER BY key
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+0	val_0+3+7
+0	val_0+3+7
+0	val_0+3+7
+10	val_10+3+7
+11	val_11+3+7
+12	val_12+3+7
+12	val_12+3+7
+15	val_15+3+7
+15	val_15+3+7
+17	val_17+3+7
+18	val_18+3+7
+18	val_18+3+7
+19	val_19+3+7
+2	val_2+3+7
+20	val_20+3+7
+24	val_24+3+7
+24	val_24+3+7
+26	val_26+3+7
+26	val_26+3+7
+27	val_27+3+7
+28	val_28+3+7
+30	val_30+3+7
+33	val_33+3+7
+34	val_34+3+7
+35	val_35+3+7
+35	val_35+3+7
+35	val_35+3+7
+37	val_37+3+7
+37	val_37+3+7
+4	val_4+3+7
+41	val_41+3+7
+42	val_42+3+7
+42	val_42+3+7
+43	val_43+3+7
+44	val_44+3+7
+47	val_47+3+7
+5	val_5+3+7
+5	val_5+3+7
+5	val_5+3+7
+51	val_51+3+7
+51	val_51+3+7
+53	val_53+3+7
+54	val_54+3+7
+57	val_57+3+7
+58	val_58+3+7
+58	val_58+3+7
+64	val_64+3+7
+65	val_65+3+7
+66	val_66+3+7
+67	val_67+3+7
+67	val_67+3+7
+69	val_69+3+7
+70	val_70+3+7
+70	val_70+3+7
+70	val_70+3+7
+72	val_72+3+7
+72	val_72+3+7
+74	val_74+3+7
+76	val_76+3+7
+76	val_76+3+7
+77	val_77+3+7
+78	val_78+3+7
+8	val_8+3+7
+80	val_80+3+7
+82	val_82+3+7
+83	val_83+3+7
+83	val_83+3+7
+84	val_84+3+7
+84	val_84+3+7
+85	val_85+3+7
+86	val_86+3+7
+87	val_87+3+7
+9	val_9+3+7
+90	val_90+3+7
+90	val_90+3+7
+90	val_90+3+7
+92	val_92+3+7
+95	val_95+3+7
+95	val_95+3+7
+96	val_96+3+7
+97	val_97+3+7
+97	val_97+3+7
+98	val_98+3+7
+98	val_98+3+7

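Two details stand out in input18. First, the planner folds the constant expressions: 1+2 and 3+4 from the TRANSFORM list appear in the Select Operator as the literals 3 and 7. Second, with no AS clause on the transform, Hive splits the script's output on the first tab only, so value captures the remainder of each line (val_X<TAB>3<TAB>7), and regexp_replace(tmap.value, '\t', '+') rewrites the embedded tabs, producing the val_X+3+7 rows above. A minimal sketch reproducing just the replace, assuming the standard src table:

    -- Rebuild a tabbed string with concat, then swap tabs for '+';
    -- '\t' is the tab escape in a Hive string literal.
    SELECT regexp_replace(concat(value, '\t', '3', '\t', '7'), '\t', '+') AS rebuilt
    FROM src
    LIMIT 5;
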
Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/input1_limit.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/input1_limit.q.out?rev=1627210&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/input1_limit.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/input1_limit.q.out Wed Sep 24 03:16:25 2014
@@ -0,0 +1,227 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest1
+PREHOOK: query: CREATE TABLE dest2(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest2
+POSTHOOK: query: CREATE TABLE dest2(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest2
+PREHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100 LIMIT 10
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key < 100 LIMIT 5
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100 LIMIT 10
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key < 100 LIMIT 5
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-4 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-4, Stage-5
+  Stage-0 depends on stages: Stage-3
+  Stage-6 depends on stages: Stage-0
+  Stage-1 depends on stages: Stage-3
+  Stage-7 depends on stages: Stage-1
+  Stage-5 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-4
+    Spark
+      Edges:
+        Reducer 4 <- Map 1 (GROUP)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  Filter Operator
+                    predicate: (key < 100) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                      Limit
+                        Number of rows: 10
+                        Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          sort order: 
+                          Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                          value expressions: _col0 (type: string), _col1 (type: string)
+        Reducer 4 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                Limit
+                  Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.dest1
+
+  Stage: Stage-3
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1
+
+  Stage: Stage-6
+    Stats-Aggr Operator
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest2
+
+  Stage: Stage-7
+    Stats-Aggr Operator
+
+  Stage: Stage-5
+    Spark
+      Edges:
+        Reducer 5 <- Map 2 (GROUP)
+#### A masked pattern was here ####
+      Vertices:
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  Filter Operator
+                    predicate: (key < 100) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                      Limit
+                        Number of rows: 5
+                        Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          sort order: 
+                          Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE
+                          value expressions: _col0 (type: string), _col1 (type: string)
+        Reducer 5 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE
+                Limit
+                  Number of rows: 5
+                  Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.dest2
+
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100 LIMIT 10
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key < 100 LIMIT 5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+PREHOOK: Output: default@dest2
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100 LIMIT 10
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key < 100 LIMIT 5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+POSTHOOK: Output: default@dest2
+POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+0	val_0
+15	val_15
+17	val_17
+27	val_27
+37	val_37
+57	val_57
+66	val_66
+82	val_82
+86	val_86
+98	val_98
+PREHOOK: query: SELECT dest2.* FROM dest2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dest2.* FROM dest2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest2
+#### A masked pattern was here ####
+27	val_27
+37	val_37
+66	val_66
+86	val_86
+98	val_98

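input1_limit gives each insert branch its own LIMIT: the plan applies a Limit operator on the map side to cut rows early, then shuffles through a single reducer (Reducer 4, Reducer 5) where a second Limit enforces the global count of 10 or 5 per branch. With no ORDER BY, which rows survive is not deterministic, which is presumably why the file carries the -- SORT_QUERY_RESULTS marker so the golden output compares order-insensitively. A sketch of a deterministic variant, assuming the same dest1 table (the same ORDER BY ... LIMIT form appears in insert_into3 below):

    -- ORDER BY before LIMIT pins down which rows survive, at the
    -- cost of a total sort funneled through one reducer.
    FROM src
    INSERT OVERWRITE TABLE dest1
    SELECT src.key, src.value WHERE src.key < 100
    ORDER BY key, value LIMIT 10;
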
Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/input_part2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/input_part2.q.out?rev=1627210&view=auto
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/input_part2.q.out (added) and hive/branches/spark/ql/src/test/results/clientpositive/spark/input_part2.q.out Wed Sep 24 03:16:25 2014 differ

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into3.q.out?rev=1627210&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into3.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into3.q.out Wed Sep 24 03:16:25 2014
@@ -0,0 +1,414 @@
+PREHOOK: query: DROP TABLE insert_into3a
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE insert_into3a
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE insert_into3b
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE insert_into3b
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE insert_into3a (key int, value string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@insert_into3a
+POSTHOOK: query: CREATE TABLE insert_into3a (key int, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@insert_into3a
+PREHOOK: query: CREATE TABLE insert_into3b (key int, value string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@insert_into3b
+POSTHOOK: query: CREATE TABLE insert_into3b (key int, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@insert_into3b
+PREHOOK: query: EXPLAIN FROM src INSERT INTO TABLE insert_into3a SELECT * ORDER BY key, value LIMIT 50
+                 INSERT INTO TABLE insert_into3b SELECT * ORDER BY key, value LIMIT 100
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN FROM src INSERT INTO TABLE insert_into3a SELECT * ORDER BY key, value LIMIT 50
+                 INSERT INTO TABLE insert_into3b SELECT * ORDER BY key, value LIMIT 100
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-4 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-4, Stage-5
+  Stage-0 depends on stages: Stage-3
+  Stage-6 depends on stages: Stage-0
+  Stage-1 depends on stages: Stage-3
+  Stage-7 depends on stages: Stage-1
+  Stage-5 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-4
+    Spark
+      Edges:
+        Reducer 4 <- Map 1 (GROUP SORT)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string), _col1 (type: string)
+                      sort order: ++
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+        Reducer 4 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                Limit
+                  Number of rows: 50
+                  Statistics: Num rows: 50 Data size: 500 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 50 Data size: 500 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 50 Data size: 500 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.insert_into3a
+
+  Stage: Stage-3
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into3a
+
+  Stage: Stage-6
+    Stats-Aggr Operator
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into3b
+
+  Stage: Stage-7
+    Stats-Aggr Operator
+
+  Stage: Stage-5
+    Spark
+      Edges:
+        Reducer 5 <- Map 2 (GROUP SORT)
+#### A masked pattern was here ####
+      Vertices:
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string), _col1 (type: string)
+                      sort order: ++
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+        Reducer 5 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                Limit
+                  Number of rows: 100
+                  Statistics: Num rows: 100 Data size: 1000 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 100 Data size: 1000 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 100 Data size: 1000 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.insert_into3b
+
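+The plan above compiles the multi-insert into a shared scan plus two
+independent sort-and-limit branches: Stage-2 materializes src once into an
+intermediate SequenceFile, and Stage-4/Stage-5 (one per target table) each
+read it, sort on (key, value), and apply their own LIMIT before writing. As a
+minimal sketch, assuming insert_into3a and insert_into3b carry the
+(key INT, value STRING) schema implied by the UDFToInteger casts above, the
+multi-insert is logically equivalent to the two standalone statements below,
+except that each standalone INSERT would rescan src:
+
+INSERT INTO TABLE insert_into3a SELECT * FROM src ORDER BY key, value LIMIT 50;   -- 50 smallest (key, value) rows
+INSERT INTO TABLE insert_into3b SELECT * FROM src ORDER BY key, value LIMIT 100;  -- 100 smallest (key, value) rows
+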
+PREHOOK: query: FROM src INSERT INTO TABLE insert_into3a SELECT * ORDER BY key, value LIMIT 50
+         INSERT INTO TABLE insert_into3b SELECT * ORDER BY key, value LIMIT 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into3a
+PREHOOK: Output: default@insert_into3b
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM src INSERT INTO TABLE insert_into3a SELECT * ORDER BY key, value LIMIT 50
+         INSERT INTO TABLE insert_into3b SELECT * ORDER BY key, value LIMIT 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into3a
+POSTHOOK: Output: default@insert_into3b
+POSTHOOK: Lineage: insert_into3a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into3a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into3b.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into3b.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3a
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into3a
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3a
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into3a
+#### A masked pattern was here ####
+-1254133670
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3b
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into3b
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3b
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into3b
+#### A masked pattern was here ####
+-1142373758
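+
+The SUM(HASH(c)) probes above are the test's order-independent checksum:
+TRANSFORM(*) USING 'tr \t _' streams each row through tr, which expands the
+\t escape itself and so rewrites the tab field delimiter to an underscore,
+returning the whole row as the single string column c; SUM(HASH(c)) then
+folds the per-row hashes into one value that does not depend on row order in
+the file. A minimal sketch of the same idiom against a hypothetical table
+t(key INT, value STRING):
+
+SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM t  -- a row (86, 'val_86') comes back as '86_val_86'
+) checksummed;
+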
+PREHOOK: query: EXPLAIN FROM src INSERT OVERWRITE TABLE insert_into3a SELECT * LIMIT 10
+                 INSERT INTO TABLE insert_into3b SELECT * LIMIT 10
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN FROM src INSERT OVERWRITE TABLE insert_into3a SELECT * LIMIT 10
+                 INSERT INTO TABLE insert_into3b SELECT * LIMIT 10
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-4 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-4, Stage-5
+  Stage-0 depends on stages: Stage-3
+  Stage-6 depends on stages: Stage-0
+  Stage-1 depends on stages: Stage-3
+  Stage-7 depends on stages: Stage-1
+  Stage-5 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-4
+    Spark
+      Edges:
+        Reducer 4 <- Map 1 (GROUP)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Limit
+                      Number of rows: 10
+                      Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: string), _col1 (type: string)
+        Reducer 4 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                Limit
+                  Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.insert_into3a
+
+  Stage: Stage-3
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into3a
+
+  Stage: Stage-6
+    Stats-Aggr Operator
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into3b
+
+  Stage: Stage-7
+    Stats-Aggr Operator
+
+  Stage: Stage-5
+    Spark
+      Edges:
+        Reducer 5 <- Map 2 (GROUP)
+#### A masked pattern was here ####
+      Vertices:
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Limit
+                      Number of rows: 10
+                      Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: string), _col1 (type: string)
+        Reducer 5 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                Limit
+                  Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.insert_into3b
+
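+Two details distinguish this plan from the previous one. First, with no
+ORDER BY the Limit is applied on the map side as well (each Map emits at most
+10 rows through an unsorted shuffle, edge type GROUP rather than GROUP SORT)
+before the single reducer re-applies it. Second, the Move Operators record
+the write semantics: Stage-0 carries replace: true for the INSERT OVERWRITE
+into insert_into3a, while Stage-1 carries replace: false for the INSERT INTO
+insert_into3b. A minimal sketch of what those flags mean for the tables,
+assuming the same schemas as above:
+
+INSERT OVERWRITE TABLE insert_into3a SELECT * FROM src LIMIT 10;  -- replace: true  -> prior contents dropped, 10 rows remain
+INSERT INTO TABLE insert_into3b SELECT * FROM src LIMIT 10;       -- replace: false -> 10 rows appended to prior contents
+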
+PREHOOK: query: FROM src INSERT OVERWRITE TABLE insert_into3a SELECT * LIMIT 10
+         INSERT INTO TABLE insert_into3b SELECT * LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into3a
+PREHOOK: Output: default@insert_into3b
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: FROM src INSERT OVERWRITE TABLE insert_into3a SELECT * LIMIT 10
+         INSERT INTO TABLE insert_into3b SELECT * LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into3a
+POSTHOOK: Output: default@insert_into3b
+POSTHOOK: Lineage: insert_into3a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into3a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into3b.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into3b.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3a
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into3a
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3a
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into3a
+#### A masked pattern was here ####
+-826625916
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3b
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into3b
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3b
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into3b
+#### A masked pattern was here ####
+-1968999674
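+
+The two checksums now reflect the overwrite-versus-append semantics exercised
+above. Because the second run's SELECT * LIMIT 10 has no ORDER BY, which ten
+rows are chosen is not guaranteed by the query itself; the golden values are
+stable only because the test input and execution are deterministic. A minimal
+sketch of row-count checks consistent with the two runs, assuming both
+earlier inserts committed as shown:
+
+SELECT COUNT(*) FROM insert_into3a;  -- expected 10: the table was overwritten
+SELECT COUNT(*) FROM insert_into3b;  -- expected 110: 100 from the first run + 10 appended
+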
+PREHOOK: query: DROP TABLE insert_into3a
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@insert_into3a
+PREHOOK: Output: default@insert_into3a
+POSTHOOK: query: DROP TABLE insert_into3a
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@insert_into3a
+POSTHOOK: Output: default@insert_into3a
+PREHOOK: query: DROP TABLE insert_into3b
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@insert_into3b
+PREHOOK: Output: default@insert_into3b
+POSTHOOK: query: DROP TABLE insert_into3b
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@insert_into3b
+POSTHOOK: Output: default@insert_into3b