Posted to commits@hive.apache.org by se...@apache.org on 2015/09/18 01:48:56 UTC
[1/3] hive git commit: HIVE-11851 : LLAP: Test failures - file.splitmetainfo file not found (Sergey Shelukhin) ADDENDUM
Repository: hive
Updated Branches:
refs/heads/llap 52a934f91 -> f2e124370
http://git-wip-us.apache.org/repos/asf/hive/blob/f2e12437/ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out
new file mode 100644
index 0000000..2cf76a3
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out
@@ -0,0 +1,672 @@
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_PRECISION_txt
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_PRECISION_txt
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_PRECISION
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_PRECISION
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE DECIMAL_PRECISION_txt(dec decimal(20,10))
+ROW FORMAT DELIMITED
+ FIELDS TERMINATED BY ' '
+STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DECIMAL_PRECISION_txt
+POSTHOOK: query: CREATE TABLE DECIMAL_PRECISION_txt(dec decimal(20,10))
+ROW FORMAT DELIMITED
+ FIELDS TERMINATED BY ' '
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DECIMAL_PRECISION_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv8.txt' INTO TABLE DECIMAL_PRECISION_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@decimal_precision_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv8.txt' INTO TABLE DECIMAL_PRECISION_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@decimal_precision_txt
+PREHOOK: query: CREATE TABLE DECIMAL_PRECISION(dec decimal(20,10))
+STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DECIMAL_PRECISION
+POSTHOOK: query: CREATE TABLE DECIMAL_PRECISION(dec decimal(20,10))
+STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DECIMAL_PRECISION
+PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_PRECISION SELECT * FROM DECIMAL_PRECISION_txt
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_precision_txt
+PREHOOK: Output: default@decimal_precision
+POSTHOOK: query: INSERT OVERWRITE TABLE DECIMAL_PRECISION SELECT * FROM DECIMAL_PRECISION_txt
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_precision_txt
+POSTHOOK: Output: default@decimal_precision
+POSTHOOK: Lineage: decimal_precision.dec SIMPLE [(decimal_precision_txt)decimal_precision_txt.FieldSchema(name:dec, type:decimal(20,10), comment:null), ]
+PREHOOK: query: SELECT * FROM DECIMAL_PRECISION ORDER BY dec
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM DECIMAL_PRECISION ORDER BY dec
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+0
+0
+0
+0
+0
+0.123456789
+0.123456789
+1.2345678901
+1.2345678901
+1.2345678901
+12.3456789012
+12.3456789012
+12.3456789012
+123.4567890123
+123.4567890123
+123.4567890123
+1234.5678901235
+1234.5678901235
+1234.5678901235
+12345.6789012346
+12345.6789012346
+123456.7890123456
+123456.7890123457
+1234567.890123456
+1234567.8901234568
+12345678.90123456
+12345678.9012345679
+123456789.0123456
+123456789.0123456789
+1234567890.123456
+1234567890.123456789
+PREHOOK: query: SELECT dec, dec + 1, dec - 1 FROM DECIMAL_PRECISION ORDER BY dec
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dec, dec + 1, dec - 1 FROM DECIMAL_PRECISION ORDER BY dec
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+0 1 -1
+0 1 -1
+0 1 -1
+0 1 -1
+0 1 -1
+0.123456789 1.123456789 -0.876543211
+0.123456789 1.123456789 -0.876543211
+1.2345678901 2.2345678901 0.2345678901
+1.2345678901 2.2345678901 0.2345678901
+1.2345678901 2.2345678901 0.2345678901
+12.3456789012 13.3456789012 11.3456789012
+12.3456789012 13.3456789012 11.3456789012
+12.3456789012 13.3456789012 11.3456789012
+123.4567890123 124.4567890123 122.4567890123
+123.4567890123 124.4567890123 122.4567890123
+123.4567890123 124.4567890123 122.4567890123
+1234.5678901235 1235.5678901235 1233.5678901235
+1234.5678901235 1235.5678901235 1233.5678901235
+1234.5678901235 1235.5678901235 1233.5678901235
+12345.6789012346 12346.6789012346 12344.6789012346
+12345.6789012346 12346.6789012346 12344.6789012346
+123456.7890123456 123457.7890123456 123455.7890123456
+123456.7890123457 123457.7890123457 123455.7890123457
+1234567.890123456 1234568.890123456 1234566.890123456
+1234567.8901234568 1234568.8901234568 1234566.8901234568
+12345678.90123456 12345679.90123456 12345677.90123456
+12345678.9012345679 12345679.9012345679 12345677.9012345679
+123456789.0123456 123456790.0123456 123456788.0123456
+123456789.0123456789 123456790.0123456789 123456788.0123456789
+1234567890.123456 1234567891.123456 1234567889.123456
+1234567890.123456789 1234567891.123456789 1234567889.123456789
+PREHOOK: query: SELECT dec, dec * 2, dec / 3 FROM DECIMAL_PRECISION ORDER BY dec
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dec, dec * 2, dec / 3 FROM DECIMAL_PRECISION ORDER BY dec
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+NULL NULL NULL
+0 0 0
+0 0 0
+0 0 0
+0 0 0
+0 0 0
+0.123456789 0.246913578 0.041152263
+0.123456789 0.246913578 0.041152263
+1.2345678901 2.4691357802 0.411522630033
+1.2345678901 2.4691357802 0.411522630033
+1.2345678901 2.4691357802 0.411522630033
+12.3456789012 24.6913578024 4.1152263004
+12.3456789012 24.6913578024 4.1152263004
+12.3456789012 24.6913578024 4.1152263004
+123.4567890123 246.9135780246 41.1522630041
+123.4567890123 246.9135780246 41.1522630041
+123.4567890123 246.9135780246 41.1522630041
+1234.5678901235 2469.135780247 411.522630041167
+1234.5678901235 2469.135780247 411.522630041167
+1234.5678901235 2469.135780247 411.522630041167
+12345.6789012346 24691.3578024692 4115.226300411533
+12345.6789012346 24691.3578024692 4115.226300411533
+123456.7890123456 246913.5780246912 41152.2630041152
+123456.7890123457 246913.5780246914 41152.263004115233
+1234567.890123456 2469135.780246912 411522.630041152
+1234567.8901234568 2469135.7802469136 411522.630041152267
+12345678.90123456 24691357.80246912 4115226.30041152
+12345678.9012345679 24691357.8024691358 4115226.300411522633
+123456789.0123456 246913578.0246912 41152263.0041152
+123456789.0123456789 246913578.0246913578 41152263.0041152263
+1234567890.123456 2469135780.246912 411522630.041152
+1234567890.123456789 2469135780.246913578 411522630.041152263
+PREHOOK: query: SELECT dec, dec / 9 FROM DECIMAL_PRECISION ORDER BY dec
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dec, dec / 9 FROM DECIMAL_PRECISION ORDER BY dec
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+0 0
+0 0
+0 0
+0 0
+0 0
+0.123456789 0.013717421
+0.123456789 0.013717421
+1.2345678901 0.137174210011
+1.2345678901 0.137174210011
+1.2345678901 0.137174210011
+12.3456789012 1.371742100133
+12.3456789012 1.371742100133
+12.3456789012 1.371742100133
+123.4567890123 13.717421001367
+123.4567890123 13.717421001367
+123.4567890123 13.717421001367
+1234.5678901235 137.174210013722
+1234.5678901235 137.174210013722
+1234.5678901235 137.174210013722
+12345.6789012346 1371.742100137178
+12345.6789012346 1371.742100137178
+123456.7890123456 13717.421001371733
+123456.7890123457 13717.421001371744
+1234567.890123456 137174.210013717333
+1234567.8901234568 137174.210013717422
+12345678.90123456 1371742.100137173333
+12345678.9012345679 1371742.100137174211
+123456789.0123456 13717421.001371733333
+123456789.0123456789 13717421.0013717421
+1234567890.123456 137174210.013717333333
+1234567890.123456789 137174210.013717421
+PREHOOK: query: SELECT dec, dec / 27 FROM DECIMAL_PRECISION ORDER BY dec
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dec, dec / 27 FROM DECIMAL_PRECISION ORDER BY dec
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+0 0
+0 0
+0 0
+0 0
+0 0
+0.123456789 0.0045724736667
+0.123456789 0.0045724736667
+1.2345678901 0.0457247366704
+1.2345678901 0.0457247366704
+1.2345678901 0.0457247366704
+12.3456789012 0.4572473667111
+12.3456789012 0.4572473667111
+12.3456789012 0.4572473667111
+123.4567890123 4.5724736671222
+123.4567890123 4.5724736671222
+123.4567890123 4.5724736671222
+1234.5678901235 45.7247366712407
+1234.5678901235 45.7247366712407
+1234.5678901235 45.7247366712407
+12345.6789012346 457.2473667123926
+12345.6789012346 457.2473667123926
+123456.7890123456 4572.4736671239111
+123456.7890123457 4572.4736671239148
+1234567.890123456 45724.7366712391111
+1234567.8901234568 45724.7366712391407
+12345678.90123456 457247.3667123911111
+12345678.9012345679 457247.3667123914037
+123456789.0123456 4572473.6671239111111
+123456789.0123456789 4572473.6671239140333
+1234567890.123456 45724736.6712391111111
+1234567890.123456789 45724736.6712391403333
+PREHOOK: query: SELECT dec, dec * dec FROM DECIMAL_PRECISION ORDER BY dec
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dec, dec * dec FROM DECIMAL_PRECISION ORDER BY dec
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+0 0
+0 0
+0 0
+0 0
+0 0
+0.123456789 0.015241578750190521
+0.123456789 0.015241578750190521
+1.2345678901 1.52415787526596567801
+1.2345678901 1.52415787526596567801
+1.2345678901 1.52415787526596567801
+12.3456789012 152.41578753153483936144
+12.3456789012 152.41578753153483936144
+12.3456789012 152.41578753153483936144
+123.4567890123 15241.57875322755800955129
+123.4567890123 15241.57875322755800955129
+123.4567890123 15241.57875322755800955129
+1234.5678901235 1524157.87532399036884525225
+1234.5678901235 1524157.87532399036884525225
+1234.5678901235 1524157.87532399036884525225
+12345.6789012346 152415787.53238916034140423716
+12345.6789012346 152415787.53238916034140423716
+123456.7890123456 15241578753.23881726870921383936
+123456.7890123457 15241578753.23884196006701630849
+1234567.890123456 1524157875323.881726870921383936
+1234567.8901234568 1524157875323.88370217954558146624
+12345678.90123456 152415787532388.1726870921383936
+12345678.9012345679 152415787532388.36774881877789971041
+123456789.0123456 15241578753238817.26870921383936
+123456789.0123456789 15241578753238836.75019051998750190521
+1234567890.123456 NULL
+1234567890.123456789 NULL
+PREHOOK: query: EXPLAIN SELECT avg(dec), sum(dec) FROM DECIMAL_PRECISION
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT avg(dec), sum(dec) FROM DECIMAL_PRECISION
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: decimal_precision
+ Statistics: Num rows: 75 Data size: 3472 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: dec (type: decimal(20,10))
+ outputColumnNames: _col0
+ Statistics: Num rows: 75 Data size: 3472 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: avg(_col0), sum(_col0)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: struct<count:bigint,sum:decimal(30,10),input:decimal(20,10)>), _col1 (type: decimal(30,10))
+ Execution mode: vectorized, llap
+ Reducer 2
+ Execution mode: uber
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: avg(VALUE._col0), sum(VALUE._col1)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: SELECT avg(dec), sum(dec) FROM DECIMAL_PRECISION
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT avg(dec), sum(dec) FROM DECIMAL_PRECISION
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+88499534.57586576220645 2743485571.8518386284
+PREHOOK: query: SELECT dec * cast('12345678901234567890.12345678' as decimal(38,18)) FROM DECIMAL_PRECISION LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dec * cast('12345678901234567890.12345678' as decimal(38,18)) FROM DECIMAL_PRECISION LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+NULL
+PREHOOK: query: SELECT * from DECIMAL_PRECISION WHERE dec > cast('1234567890123456789012345678.12345678' as decimal(38,18)) LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * from DECIMAL_PRECISION WHERE dec > cast('1234567890123456789012345678.12345678' as decimal(38,18)) LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+PREHOOK: query: SELECT dec * 12345678901234567890.12345678 FROM DECIMAL_PRECISION LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dec * 12345678901234567890.12345678 FROM DECIMAL_PRECISION LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+NULL
+PREHOOK: query: SELECT MIN(cast('12345678901234567890.12345678' as decimal(38,18))) FROM DECIMAL_PRECISION
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT MIN(cast('12345678901234567890.12345678' as decimal(38,18))) FROM DECIMAL_PRECISION
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+12345678901234567890.12345678
+PREHOOK: query: SELECT COUNT(cast('12345678901234567890.12345678' as decimal(38,18))) FROM DECIMAL_PRECISION
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT COUNT(cast('12345678901234567890.12345678' as decimal(38,18))) FROM DECIMAL_PRECISION
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_precision
+#### A masked pattern was here ####
+75
+PREHOOK: query: DROP TABLE DECIMAL_PRECISION_txt
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@decimal_precision_txt
+PREHOOK: Output: default@decimal_precision_txt
+POSTHOOK: query: DROP TABLE DECIMAL_PRECISION_txt
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@decimal_precision_txt
+POSTHOOK: Output: default@decimal_precision_txt
+PREHOOK: query: DROP TABLE DECIMAL_PRECISION
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@decimal_precision
+PREHOOK: Output: default@decimal_precision
+POSTHOOK: query: DROP TABLE DECIMAL_PRECISION
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@decimal_precision
+POSTHOOK: Output: default@decimal_precision
[3/3] hive git commit: HIVE-11851 : LLAP: Test failures - file.splitmetainfo file not found (Sergey Shelukhin) ADDENDUM
Posted by se...@apache.org.
HIVE-11851 : LLAP: Test failures - file.splitmetainfo file not found (Sergey Shelukhin) ADDENDUM
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f2e12437
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f2e12437
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f2e12437
Branch: refs/heads/llap
Commit: f2e124370823693889dc5296170d5e5cb55177e9
Parents: 52a934f
Author: Sergey Shelukhin <se...@apache.org>
Authored: Thu Sep 17 16:49:39 2015 -0700
Committer: Sergey Shelukhin <se...@apache.org>
Committed: Thu Sep 17 16:49:39 2015 -0700
----------------------------------------------------------------------
.../clientpositive/llap/metadataonly1.q.out | 2050 +++++++++++++++++
.../clientpositive/llap/optimize_nullscan.q.out | 2159 ++++++++++++++++++
.../llap/vector_decimal_precision.q.out | 672 ++++++
3 files changed, 4881 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/f2e12437/ql/src/test/results/clientpositive/llap/metadataonly1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/metadataonly1.q.out b/ql/src/test/results/clientpositive/llap/metadataonly1.q.out
new file mode 100644
index 0000000..d569a23
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/metadataonly1.q.out
@@ -0,0 +1,2050 @@
+PREHOOK: query: CREATE TABLE TEST1(A INT, B DOUBLE) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@TEST1
+POSTHOOK: query: CREATE TABLE TEST1(A INT, B DOUBLE) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@TEST1
+PREHOOK: query: explain extended select max(ds) from TEST1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select max(ds) from TEST1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ TEST1
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_FUNCTION
+ max
+ TOK_TABLE_OR_COL
+ ds
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: test1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: max(_col0)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ tag: -1
+ value expressions: _col0 (type: string)
+ auto parallelism: false
+ Execution mode: llap
+ Reducer 2
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: max(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types string
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select max(ds) from TEST1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+#### A masked pattern was here ####
+POSTHOOK: query: select max(ds) from TEST1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+#### A masked pattern was here ####
+NULL
+PREHOOK: query: alter table TEST1 add partition (ds='1')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@test1
+POSTHOOK: query: alter table TEST1 add partition (ds='1')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@test1
+POSTHOOK: Output: default@test1@ds=1
+PREHOOK: query: explain extended select max(ds) from TEST1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select max(ds) from TEST1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ TEST1
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_FUNCTION
+ max
+ TOK_TABLE_OR_COL
+ ds
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: test1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: max(_col0)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ tag: -1
+ value expressions: _col0 (type: string)
+ auto parallelism: false
+ Path -> Alias:
+ -mr-10004default.test1{ds=1} [test1]
+ Path -> Partition:
+ -mr-10004default.test1{ds=1}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test1
+ name: default.test1
+ Truncated Path -> Alias:
+ -mr-10004default.test1{ds=1} [test1]
+ Reducer 2
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: max(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types string
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select max(ds) from TEST1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test1@ds=1
+#### A masked pattern was here ####
+POSTHOOK: query: select max(ds) from TEST1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test1@ds=1
+#### A masked pattern was here ####
+1
+PREHOOK: query: explain extended select count(distinct ds) from TEST1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select count(distinct ds) from TEST1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ TEST1
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_FUNCTIONDI
+ count
+ TOK_TABLE_OR_COL
+ ds
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: test1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: count(DISTINCT _col0)
+ keys: _col0 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ tag: -1
+ auto parallelism: false
+ Path -> Alias:
+ -mr-10004default.test1{ds=1} [test1]
+ Path -> Partition:
+ -mr-10004default.test1{ds=1}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test1
+ name: default.test1
+ Truncated Path -> Alias:
+ -mr-10004default.test1{ds=1} [test1]
+ Reducer 2
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(DISTINCT KEY._col0:0._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types bigint
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select count(distinct ds) from TEST1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test1@ds=1
+#### A masked pattern was here ####
+POSTHOOK: query: select count(distinct ds) from TEST1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test1@ds=1
+#### A masked pattern was here ####
+1
+PREHOOK: query: explain extended select count(ds) from TEST1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select count(ds) from TEST1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ TEST1
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_FUNCTION
+ count
+ TOK_TABLE_OR_COL
+ ds
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: test1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: count(_col0)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ tag: -1
+ value expressions: _col0 (type: bigint)
+ auto parallelism: false
+ Execution mode: llap
+ Path -> Alias:
+#### A masked pattern was here ####
+ Path -> Partition:
+#### A masked pattern was here ####
+ Partition
+ base file name: ds=1
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test1
+ name: default.test1
+ Truncated Path -> Alias:
+ /test1/ds=1 [test1]
+ Reducer 2
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types bigint
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select count(ds) from TEST1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test1@ds=1
+#### A masked pattern was here ####
+POSTHOOK: query: select count(ds) from TEST1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test1@ds=1
+#### A masked pattern was here ####
+0
+PREHOOK: query: alter table TEST1 add partition (ds='2')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@test1
+POSTHOOK: query: alter table TEST1 add partition (ds='2')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@test1
+POSTHOOK: Output: default@test1@ds=2
+PREHOOK: query: explain extended
+select count(*) from TEST1 a2 join (select max(ds) m from TEST1) b on a2.ds=b.m
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select count(*) from TEST1 a2 join (select max(ds) m from TEST1) b on a2.ds=b.m
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_JOIN
+ TOK_TABREF
+ TOK_TABNAME
+ TEST1
+ a2
+ TOK_SUBQUERY
+ TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ TEST1
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_FUNCTION
+ max
+ TOK_TABLE_OR_COL
+ ds
+ m
+ b
+ =
+ .
+ TOK_TABLE_OR_COL
+ a2
+ ds
+ .
+ TOK_TABLE_OR_COL
+ b
+ m
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_FUNCTIONSTAR
+ count
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+ Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+ Reducer 5 <- Map 4 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: a2
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ tag: 0
+ auto parallelism: true
+ Execution mode: llap
+ Path -> Alias:
+#### A masked pattern was here ####
+ Path -> Partition:
+#### A masked pattern was here ####
+ Partition
+ base file name: ds=1
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test1
+ name: default.test1
+#### A masked pattern was here ####
+ Partition
+ base file name: ds=2
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test1
+ name: default.test1
+ Truncated Path -> Alias:
+ /test1/ds=1 [a2]
+ /test1/ds=2 [a2]
+ Map 4
+ Map Operator Tree:
+ TableScan
+ alias: a2
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: max(_col0)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ tag: -1
+ value expressions: _col0 (type: string)
+ auto parallelism: false
+ Execution mode: llap
+ Path -> Alias:
+#### A masked pattern was here ####
+ Path -> Partition:
+#### A masked pattern was here ####
+ Partition
+ base file name: ds=1
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test1
+ name: default.test1
+#### A masked pattern was here ####
+ Partition
+ base file name: ds=2
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test1
+ name: default.test1
+ Truncated Path -> Alias:
+ /test1/ds=1 [a2]
+ /test1/ds=2 [a2]
+ Reducer 2
+ Execution mode: llap
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Merge Join Operator
+ condition map:
+ Inner Join 0 to 1
+ keys:
+ 0 _col0 (type: string)
+ 1 _col0 (type: string)
+ Position of Big Table: 0
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: count()
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ tag: -1
+ value expressions: _col0 (type: bigint)
+ auto parallelism: false
+ Reducer 3
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types bigint
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+ Reducer 5
+ Execution mode: llap
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: max(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ isSamplingPred: false
+ predicate: _col0 is not null (type: boolean)
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ tag: 1
+ auto parallelism: true
+ Select Operator
+ expressions: _col0 (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: string)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Dynamic Partitioning Event Operator
+ Target Input: a2
+ Partition key expr: ds
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Target column: ds
+ Target Vertex: Map 1
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select count(*) from TEST1 a2 join (select max(ds) m from TEST1) b on a2.ds=b.m
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test1@ds=1
+PREHOOK: Input: default@test1@ds=2
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from TEST1 a2 join (select max(ds) m from TEST1) b on a2.ds=b.m
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test1@ds=1
+POSTHOOK: Input: default@test1@ds=2
+#### A masked pattern was here ####
+0
+PREHOOK: query: CREATE TABLE TEST2(A INT, B DOUBLE) partitioned by (ds string, hr string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@TEST2
+POSTHOOK: query: CREATE TABLE TEST2(A INT, B DOUBLE) partitioned by (ds string, hr string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@TEST2
+PREHOOK: query: alter table TEST2 add partition (ds='1', hr='1')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@test2
+POSTHOOK: query: alter table TEST2 add partition (ds='1', hr='1')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@test2
+POSTHOOK: Output: default@test2@ds=1/hr=1
+PREHOOK: query: alter table TEST2 add partition (ds='1', hr='2')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@test2
+POSTHOOK: query: alter table TEST2 add partition (ds='1', hr='2')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@test2
+POSTHOOK: Output: default@test2@ds=1/hr=2
+PREHOOK: query: alter table TEST2 add partition (ds='1', hr='3')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@test2
+POSTHOOK: query: alter table TEST2 add partition (ds='1', hr='3')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@test2
+POSTHOOK: Output: default@test2@ds=1/hr=3
+PREHOOK: query: explain extended select ds, count(distinct hr) from TEST2 group by ds
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select ds, count(distinct hr) from TEST2 group by ds
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ TEST2
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_TABLE_OR_COL
+ ds
+ TOK_SELEXPR
+ TOK_FUNCTIONDI
+ count
+ TOK_TABLE_OR_COL
+ hr
+ TOK_GROUPBY
+ TOK_TABLE_OR_COL
+ ds
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: test2
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string), hr (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: count(DISTINCT _col1)
+ keys: _col0 (type: string), _col1 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: string)
+ sort order: ++
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ tag: -1
+ auto parallelism: true
+ Path -> Alias:
+ -mr-10004default.test2{ds=1, hr=1} [test2]
+ -mr-10005default.test2{ds=1, hr=2} [test2]
+ -mr-10006default.test2{ds=1, hr=3} [test2]
+ Path -> Partition:
+ -mr-10004default.test2{ds=1, hr=1}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ hr 1
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+ -mr-10005default.test2{ds=1, hr=2}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ hr 2
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+ -mr-10006default.test2{ds=1, hr=3}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ hr 3
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+ Truncated Path -> Alias:
+ -mr-10004default.test2{ds=1, hr=1} [test2]
+ -mr-10005default.test2{ds=1, hr=2} [test2]
+ -mr-10006default.test2{ds=1, hr=3} [test2]
+ Reducer 2
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(DISTINCT KEY._col1:0._col0)
+ keys: KEY._col0 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0,_col1
+ columns.types string:bigint
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select ds, count(distinct hr) from TEST2 group by ds
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test2
+PREHOOK: Input: default@test2@ds=1/hr=1
+PREHOOK: Input: default@test2@ds=1/hr=2
+PREHOOK: Input: default@test2@ds=1/hr=3
+#### A masked pattern was here ####
+POSTHOOK: query: select ds, count(distinct hr) from TEST2 group by ds
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test2
+POSTHOOK: Input: default@test2@ds=1/hr=1
+POSTHOOK: Input: default@test2@ds=1/hr=2
+POSTHOOK: Input: default@test2@ds=1/hr=3
+#### A masked pattern was here ####
+1 3
+PREHOOK: query: explain extended select ds, count(hr) from TEST2 group by ds
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select ds, count(hr) from TEST2 group by ds
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ TEST2
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_TABLE_OR_COL
+ ds
+ TOK_SELEXPR
+ TOK_FUNCTION
+ count
+ TOK_TABLE_OR_COL
+ hr
+ TOK_GROUPBY
+ TOK_TABLE_OR_COL
+ ds
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: test2
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string), hr (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: count(_col1)
+ keys: _col0 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ tag: -1
+ value expressions: _col1 (type: bigint)
+ auto parallelism: true
+ Execution mode: llap
+ Path -> Alias:
+#### A masked pattern was here ####
+ Path -> Partition:
+#### A masked pattern was here ####
+ Partition
+ base file name: hr=1
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ hr 1
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+#### A masked pattern was here ####
+ Partition
+ base file name: hr=2
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ hr 2
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+#### A masked pattern was here ####
+ Partition
+ base file name: hr=3
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ hr 3
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+ Truncated Path -> Alias:
+ /test2/ds=1/hr=1 [test2]
+ /test2/ds=1/hr=2 [test2]
+ /test2/ds=1/hr=3 [test2]
+ Reducer 2
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ keys: KEY._col0 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0,_col1
+ columns.types string:bigint
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select ds, count(hr) from TEST2 group by ds
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test2
+PREHOOK: Input: default@test2@ds=1/hr=1
+PREHOOK: Input: default@test2@ds=1/hr=2
+PREHOOK: Input: default@test2@ds=1/hr=3
+#### A masked pattern was here ####
+POSTHOOK: query: select ds, count(hr) from TEST2 group by ds
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test2
+POSTHOOK: Input: default@test2@ds=1/hr=1
+POSTHOOK: Input: default@test2@ds=1/hr=2
+POSTHOOK: Input: default@test2@ds=1/hr=3
+#### A masked pattern was here ####
+PREHOOK: query: explain extended select max(ds) from TEST1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select max(ds) from TEST1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ TEST1
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_FUNCTION
+ max
+ TOK_TABLE_OR_COL
+ ds
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: test1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: max(_col0)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ tag: -1
+ value expressions: _col0 (type: string)
+ auto parallelism: false
+ Path -> Alias:
+ -mr-10004default.test1{ds=1} [test1]
+ -mr-10005default.test1{ds=2} [test1]
+ Path -> Partition:
+ -mr-10004default.test1{ds=1}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test1
+ name: default.test1
+ -mr-10005default.test1{ds=2}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test1
+ name: default.test1
+ Truncated Path -> Alias:
+ -mr-10004default.test1{ds=1} [test1]
+ -mr-10005default.test1{ds=2} [test1]
+ Reducer 2
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: max(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types string
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select max(ds) from TEST1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test1@ds=1
+PREHOOK: Input: default@test1@ds=2
+#### A masked pattern was here ####
+POSTHOOK: query: select max(ds) from TEST1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test1@ds=1
+POSTHOOK: Input: default@test1@ds=2
+#### A masked pattern was here ####
+2
+PREHOOK: query: select distinct ds from srcpart
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select distinct ds from srcpart
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+2008-04-08
+2008-04-09
+PREHOOK: query: select min(ds),max(ds) from srcpart
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select min(ds),max(ds) from srcpart
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+2008-04-08 2008-04-09
+PREHOOK: query: -- HIVE-3594 URI encoding for temporary path
+alter table TEST2 add partition (ds='01:10:10', hr='01')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@test2
+POSTHOOK: query: -- HIVE-3594 URI encoding for temporary path
+alter table TEST2 add partition (ds='01:10:10', hr='01')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@test2
+POSTHOOK: Output: default@test2@ds=01%3A10%3A10/hr=01
+PREHOOK: query: alter table TEST2 add partition (ds='01:10:20', hr='02')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@test2
+POSTHOOK: query: alter table TEST2 add partition (ds='01:10:20', hr='02')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@test2
+POSTHOOK: Output: default@test2@ds=01%3A10%3A20/hr=02
+PREHOOK: query: explain extended select ds, count(distinct hr) from TEST2 group by ds
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select ds, count(distinct hr) from TEST2 group by ds
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ TEST2
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_TABLE_OR_COL
+ ds
+ TOK_SELEXPR
+ TOK_FUNCTIONDI
+ count
+ TOK_TABLE_OR_COL
+ hr
+ TOK_GROUPBY
+ TOK_TABLE_OR_COL
+ ds
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: test2
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string), hr (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: count(DISTINCT _col1)
+ keys: _col0 (type: string), _col1 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: string)
+ sort order: ++
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ tag: -1
+ auto parallelism: true
+ Path -> Alias:
+ -mr-10004default.test2{ds=01_10_10, hr=01} [test2]
+ -mr-10005default.test2{ds=01_10_20, hr=02} [test2]
+ -mr-10006default.test2{ds=1, hr=1} [test2]
+ -mr-10007default.test2{ds=1, hr=2} [test2]
+ -mr-10008default.test2{ds=1, hr=3} [test2]
+ Path -> Partition:
+ -mr-10004default.test2{ds=01_10_10, hr=01}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 01:10:10
+ hr 01
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+ -mr-10005default.test2{ds=01_10_20, hr=02}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 01:10:20
+ hr 02
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+ -mr-10006default.test2{ds=1, hr=1}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ hr 1
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+ -mr-10007default.test2{ds=1, hr=2}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ hr 2
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+ -mr-10008default.test2{ds=1, hr=3}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ hr 3
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+ Truncated Path -> Alias:
+ -mr-10004default.test2{ds=01_10_10, hr=01} [test2]
+ -mr-10005default.test2{ds=01_10_20, hr=02} [test2]
+ -mr-10006default.test2{ds=1, hr=1} [test2]
+ -mr-10007default.test2{ds=1, hr=2} [test2]
+ -mr-10008default.test2{ds=1, hr=3} [test2]
+ Reducer 2
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(DISTINCT KEY._col1:0._col0)
+ keys: KEY._col0 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0,_col1
+ columns.types string:bigint
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select ds, count(distinct hr) from TEST2 group by ds
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test2
+PREHOOK: Input: default@test2@ds=01%3A10%3A10/hr=01
+PREHOOK: Input: default@test2@ds=01%3A10%3A20/hr=02
+PREHOOK: Input: default@test2@ds=1/hr=1
+PREHOOK: Input: default@test2@ds=1/hr=2
+PREHOOK: Input: default@test2@ds=1/hr=3
+#### A masked pattern was here ####
+POSTHOOK: query: select ds, count(distinct hr) from TEST2 group by ds
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test2
+POSTHOOK: Input: default@test2@ds=01%3A10%3A10/hr=01
+POSTHOOK: Input: default@test2@ds=01%3A10%3A20/hr=02
+POSTHOOK: Input: default@test2@ds=1/hr=1
+POSTHOOK: Input: default@test2@ds=1/hr=2
+POSTHOOK: Input: default@test2@ds=1/hr=3
+#### A masked pattern was here ####
+01:10:10 1
+01:10:20 1
+1 3
[2/3] hive git commit: HIVE-11851 : LLAP: Test failures - file.splitmetainfo file not found (Sergey Shelukhin) ADDENDUM
Posted by se...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/f2e12437/ql/src/test/results/clientpositive/llap/optimize_nullscan.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/optimize_nullscan.q.out b/ql/src/test/results/clientpositive/llap/optimize_nullscan.q.out
new file mode 100644
index 0000000..d58d033
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/optimize_nullscan.q.out
@@ -0,0 +1,2159 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+-- Disable CBO here, because it messes with the cases specifically crafted for the optimizer.
+-- Instead, we could improve the optimizer to recognize more cases, e.g. filter before join.
+
+explain extended
+select key from src where false
+PREHOOK: type: QUERY
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+-- Disable CBO here, because it messes with the cases specifically crafted for the optimizer.
+-- Instead, we could improve the optimizer to recognize more cases, e.g. filter before join.
+
+explain extended
+select key from src where false
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ src
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_TABLE_OR_COL
+ key
+ TOK_WHERE
+ false
+
+
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: src
+ GatherStats: false
+ Filter Operator
+ isSamplingPred: false
+ predicate: false (type: boolean)
+ Select Operator
+ expressions: key (type: string)
+ outputColumnNames: _col0
+ ListSink
+
+PREHOOK: query: select key from src where false
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select key from src where false
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+PREHOOK: query: explain extended
+select count(key) from srcpart where 1=2 group by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select count(key) from srcpart where 1=2 group by key
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ srcpart
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_FUNCTION
+ count
+ TOK_TABLE_OR_COL
+ key
+ TOK_WHERE
+ =
+ 1
+ 2
+ TOK_GROUPBY
+ TOK_TABLE_OR_COL
+ key
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: srcpart
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Filter Operator
+ isSamplingPred: false
+ predicate: false (type: boolean)
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: count(key)
+ keys: key (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ tag: -1
+ value expressions: _col1 (type: bigint)
+ auto parallelism: true
+ Execution mode: llap
+ Reducer 2
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ keys: KEY._col0 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: bigint)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types bigint
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select count(key) from srcpart where 1=2 group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+#### A masked pattern was here ####
+POSTHOOK: query: select count(key) from srcpart where 1=2 group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+#### A masked pattern was here ####
+PREHOOK: query: explain extended
+select * from (select key from src where false) a left outer join (select key from srcpart limit 0) b on a.key=b.key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select * from (select key from src where false) a left outer join (select key from srcpart limit 0) b on a.key=b.key
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_LEFTOUTERJOIN
+ TOK_SUBQUERY
+ TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ src
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_TABLE_OR_COL
+ key
+ TOK_WHERE
+ false
+ a
+ TOK_SUBQUERY
+ TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ srcpart
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_TABLE_OR_COL
+ key
+ TOK_LIMIT
+ 0
+ b
+ =
+ .
+ TOK_TABLE_OR_COL
+ a
+ key
+ .
+ TOK_TABLE_OR_COL
+ b
+ key
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_ALLCOLREF
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+ Reducer 4 <- Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: src
+ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+ GatherStats: false
+ Filter Operator
+ isSamplingPred: false
+ predicate: false (type: boolean)
+ Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+ tag: 0
+ auto parallelism: true
+ Path -> Alias:
+ -mr-10003default.src{} [src]
+ Path -> Partition:
+ -mr-10003default.src{}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.src
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ serialization.ddl struct src { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.src
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ serialization.ddl struct src { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src
+ name: default.src
+ Truncated Path -> Alias:
+ -mr-10003default.src{} [src]
+ Map 3
+ Map Operator Tree:
+ TableScan
+ alias: srcpart
+ Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: key (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+ Limit
+ Number of rows: 0
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ tag: -1
+ value expressions: _col0 (type: string)
+ auto parallelism: false
+ Path -> Alias:
+ -mr-10004default.srcpart{ds=2008-04-08, hr=11} [srcpart]
+ -mr-10005default.srcpart{ds=2008-04-08, hr=12} [srcpart]
+ -mr-10006default.srcpart{ds=2008-04-09, hr=11} [srcpart]
+ -mr-10007default.srcpart{ds=2008-04-09, hr=12} [srcpart]
+ Path -> Partition:
+ -mr-10004default.srcpart{ds=2008-04-08, hr=11}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2008-04-08
+ hr 11
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ numFiles 1
+ numRows 500
+ partition_columns ds/hr
+ partition_columns.types string:string
+ rawDataSize 5312
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.srcpart
+ name: default.srcpart
+ -mr-10005default.srcpart{ds=2008-04-08, hr=12}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2008-04-08
+ hr 12
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ numFiles 1
+ numRows 500
+ partition_columns ds/hr
+ partition_columns.types string:string
+ rawDataSize 5312
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.srcpart
+ name: default.srcpart
+ -mr-10006default.srcpart{ds=2008-04-09, hr=11}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2008-04-09
+ hr 11
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ numFiles 1
+ numRows 500
+ partition_columns ds/hr
+ partition_columns.types string:string
+ rawDataSize 5312
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.srcpart
+ name: default.srcpart
+ -mr-10007default.srcpart{ds=2008-04-09, hr=12}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2008-04-09
+ hr 12
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ numFiles 1
+ numRows 500
+ partition_columns ds/hr
+ partition_columns.types string:string
+ rawDataSize 5312
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.srcpart
+ name: default.srcpart
+ Truncated Path -> Alias:
+ -mr-10004default.srcpart{ds=2008-04-08, hr=11} [srcpart]
+ -mr-10005default.srcpart{ds=2008-04-08, hr=12} [srcpart]
+ -mr-10006default.srcpart{ds=2008-04-09, hr=11} [srcpart]
+ -mr-10007default.srcpart{ds=2008-04-09, hr=12} [srcpart]
+ Reducer 2
+ Execution mode: llap
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Merge Join Operator
+ condition map:
+ Left Outer Join0 to 1
+ keys:
+ 0 _col0 (type: string)
+ 1 _col0 (type: string)
+ outputColumnNames: _col0, _col1
+ Position of Big Table: 0
+ Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0,_col1
+ columns.types string:string
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+ Reducer 4
+ Execution mode: llap
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Select Operator
+ expressions: VALUE._col0 (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Limit
+ Number of rows: 0
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ tag: 1
+ auto parallelism: true
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select * from (select key from src where false) a left outer join (select key from srcpart limit 0) b on a.key=b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select * from (select key from src where false) a left outer join (select key from srcpart limit 0) b on a.key=b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+PREHOOK: query: explain extended
+select count(key) from src where false union all select count(key) from srcpart
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select count(key) from src where false union all select count(key) from srcpart
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_SUBQUERY
+ TOK_UNIONALL
+ TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ src
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_FUNCTION
+ count
+ TOK_TABLE_OR_COL
+ key
+ TOK_WHERE
+ false
+ TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ srcpart
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_FUNCTION
+ count
+ TOK_TABLE_OR_COL
+ key
+ _u1
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_ALLCOLREF
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE), Union 3 (CONTAINS)
+ Reducer 5 <- Map 4 (SIMPLE_EDGE), Union 3 (CONTAINS)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: src
+ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+ GatherStats: false
+ Filter Operator
+ isSamplingPred: false
+ predicate: false (type: boolean)
+ Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: count(key)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ tag: -1
+ value expressions: _col0 (type: bigint)
+ auto parallelism: false
+ Path -> Alias:
+ -mr-10003default.src{} [src]
+ Path -> Partition:
+ -mr-10003default.src{}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.src
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ serialization.ddl struct src { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.src
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ serialization.ddl struct src { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src
+ name: default.src
+ Truncated Path -> Alias:
+ -mr-10003default.src{} [src]
+ Map 4
+ Map Operator Tree:
+ TableScan
+ alias: srcpart
+ Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: key (type: string)
+ outputColumnNames: key
+ Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: count(key)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ tag: -1
+ value expressions: _col0 (type: bigint)
+ auto parallelism: false
+ Execution mode: llap
+ Path -> Alias:
+#### A masked pattern was here ####
+ Path -> Partition:
+#### A masked pattern was here ####
+ Partition
+ base file name: hr=11
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2008-04-08
+ hr 11
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ numFiles 1
+ numRows 500
+ partition_columns ds/hr
+ partition_columns.types string:string
+ rawDataSize 5312
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.srcpart
+ name: default.srcpart
+#### A masked pattern was here ####
+ Partition
+ base file name: hr=12
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2008-04-08
+ hr 12
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ numFiles 1
+ numRows 500
+ partition_columns ds/hr
+ partition_columns.types string:string
+ rawDataSize 5312
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.srcpart
+ name: default.srcpart
+#### A masked pattern was here ####
+ Partition
+ base file name: hr=11
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2008-04-09
+ hr 11
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ numFiles 1
+ numRows 500
+ partition_columns ds/hr
+ partition_columns.types string:string
+ rawDataSize 5312
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.srcpart
+ name: default.srcpart
+#### A masked pattern was here ####
+ Partition
+ base file name: hr=12
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2008-04-09
+ hr 12
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ numFiles 1
+ numRows 500
+ partition_columns ds/hr
+ partition_columns.types string:string
+ rawDataSize 5312
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.srcpart
+ name: default.srcpart
+ Truncated Path -> Alias:
+ /srcpart/ds=2008-04-08/hr=11 [srcpart]
+ /srcpart/ds=2008-04-08/hr=12 [srcpart]
+ /srcpart/ds=2008-04-09/hr=11 [srcpart]
+ /srcpart/ds=2008-04-09/hr=12 [srcpart]
+ Reducer 2
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types bigint
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+ Reducer 5
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types bigint
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+ Union 3
+ Vertex: Union 3
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select count(key) from src where false union all select count(key) from srcpart
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select count(key) from src where false union all select count(key) from srcpart
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+0
+2000
+Warning: Shuffle Join MERGEJOIN[15][tables = [a, b]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: explain extended
+select * from (select key from src where false) a left outer join (select value from srcpart limit 0) b
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select * from (select key from src where false) a left outer join (select value from srcpart limit 0) b
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_LEFTOUTERJOIN
+ TOK_SUBQUERY
+ TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ src
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_TABLE_OR_COL
+ key
+ TOK_WHERE
+ false
+ a
+ TOK_SUBQUERY
+ TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ srcpart
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_TABLE_OR_COL
+ value
+ TOK_LIMIT
+ 0
+ b
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_ALLCOLREF
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+ Reducer 4 <- Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: src
+ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+ GatherStats: false
+ Filter Operator
+ isSamplingPred: false
+ predicate: false (type: boolean)
+ Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+ tag: 0
+ value expressions: _col0 (type: string)
+ auto parallelism: false
+ Path -> Alias:
+ -mr-10003default.src{} [src]
+ Path -> Partition:
+ -mr-10003default.src{}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.src
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ serialization.ddl struct src { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.src
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ serialization.ddl struct src { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src
+ name: default.src
+ Truncated Path -> Alias:
+ -mr-10003default.src{} [src]
+ Map 3
+ Map Operator Tree:
+ TableScan
+ alias: srcpart
+ Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: value (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+ Limit
+ Number of rows: 0
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ tag: -1
+ value expressions: _col0 (type: string)
+ auto parallelism: false
+ Path -> Alias:
+ -mr-10004default.srcpart{ds=2008-04-08, hr=11} [srcpart]
+ -mr-10005default.srcpart{ds=2008-04-08, hr=12} [srcpart]
+ -mr-10006default.srcpart{ds=2008-04-09, hr=11} [srcpart]
+ -mr-10007default.srcpart{ds=2008-04-09, hr=12} [srcpart]
+ Path -> Partition:
+ -mr-10004default.srcpart{ds=2008-04-08, hr=11}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2008-04-08
+ hr 11
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ numFiles 1
+ numRows 500
+ partition_columns ds/hr
+ partition_columns.types string:string
+ rawDataSize 5312
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.srcpart
+ name: default.srcpart
+ -mr-10005default.srcpart{ds=2008-04-08, hr=12}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2008-04-08
+ hr 12
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ numFiles 1
+ numRows 500
+ partition_columns ds/hr
+ partition_columns.types string:string
+ rawDataSize 5312
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.srcpart
+ name: default.srcpart
+ -mr-10006default.srcpart{ds=2008-04-09, hr=11}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2008-04-09
+ hr 11
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ numFiles 1
+ numRows 500
+ partition_columns ds/hr
+ partition_columns.types string:string
+ rawDataSize 5312
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.srcpart
+ name: default.srcpart
+ -mr-10007default.srcpart{ds=2008-04-09, hr=12}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2008-04-09
+ hr 12
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ numFiles 1
+ numRows 500
+ partition_columns ds/hr
+ partition_columns.types string:string
+ rawDataSize 5312
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.srcpart
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.srcpart
+ name: default.srcpart
+ Truncated Path -> Alias:
+ -mr-10004default.srcpart{ds=2008-04-08, hr=11} [srcpart]
+ -mr-10005default.srcpart{ds=2008-04-08, hr=12} [srcpart]
+ -mr-10006default.srcpart{ds=2008-04-09, hr=11} [srcpart]
+ -mr-10007default.srcpart{ds=2008-04-09, hr=12} [srcpart]
+ Reducer 2
+ Execution mode: llap
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Merge Join Operator
+ condition map:
+ Left Outer Join0 to 1
+ keys:
+ 0
+ 1
+ outputColumnNames: _col0, _col1
+ Position of Big Table: 0
+ Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0,_col1
+ columns.types string:string
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+ Reducer 4
+ Execution mode: llap
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Select Operator
+ expressions: VALUE._col0 (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Limit
+ Number of rows: 0
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ tag: 1
+ value expressions: _col0 (type: string)
+ auto parallelism: false
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+Warning: Shuffle Join MERGEJOIN[15][tables = [a, b]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: select * from (select key from src where false) a left outer join (select value from srcpart limit 0) b
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select * from (select key from src where false) a left outer join (select value from srcpart limit 0) b
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+PREHOOK: query: explain extended
+select * from (select key from src union all select src.key from src left outer join srcpart on src.key = srcpart.key) a where false
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select * from (select key from src union all select src.key from src left outer join srcpart on src.key = srcpart.key) a where false
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_SUBQUERY
+ TOK_UNIONALL
+ TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ src
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_TABLE_OR_COL
+ key
+ TOK_QUERY
+ TOK_FROM
+ TOK_LEFTOUTERJOIN
+ TOK_TABREF
+ TOK_TABNAME
+ src
+ TOK_TABREF
+ TOK_TABNAME
+ srcpart
+ =
+ .
+ TOK_TABLE_OR_COL
+ src
+ key
+ .
+ TOK_TABLE_OR_COL
+ srcpart
+ key
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ .
+ TOK_TABLE_OR_COL
+ src
+ key
+ a
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_ALLCOLREF
+ TOK_WHERE
+ false
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Map 1 <- Union 2 (CONTAINS)
+ Reducer 4 <- Map 3 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE), Union 2 (CONTAINS)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: src
+ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+ GatherStats: false
+ Filter Operator
+ isSamplingPred: false
+ predicate: false (type: boolean)
+ Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types string
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+ Execution mode: llap
+ Path -> Alias:
+#### A masked pattern was here ####
+ Path -> Partition:
+#### A masked pattern was here ####
+ Partition
+ base file name: src
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.src
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ serialization.ddl struct src { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.src
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ serialization.ddl struct src { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src
+ name: default.src
+ Truncated Path -> Alias:
+ /src [src]
+ Map 3
+ Map Operator Tree:
+ TableScan
+ alias: src
+ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+ GatherStats: false
+ Filter Operator
+ isSamplingPred: false
+ predicate: false (type: boolean)
+ Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: key (type: string)
+ sort order: +
+ Map-reduce partition columns: key (type: string)
+ Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+ tag: 0
+ auto parallelism: true
+ Path -> Alias:
+ -mr-10003default.src{} [src]
+ Path -> Partition:
+ -mr-10003default.src{}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.src
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ serialization.ddl struct src { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.src
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ serialization.ddl struct src { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src
+ name: default.src
+ Truncated Path -> Alias:
+ -mr-10003default.src{} [src]
+ Map 5
+ Map Operator Tree:
+ TableScan
+ alias: srcpart
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Filter Operator
+ isSamplingPred: false
+ predicate: false (type: boolean)
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Reduce Output Operator
+ key expressions: key (type: string)
+ sort order: +
+ Map-reduce partition columns: key (type: string)
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ tag: 1
+ auto parallelism: true
+ Execution mode: llap
+ Reducer 4
+ Execution mode: llap
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Merge Join Operator
+ condition map:
+ Left Outer Join0 to 1
+ keys:
+ 0 key (type: string)
+ 1 key (type: string)
+ outputColumnNames: _col0
+ Position of Big Table: 0
+ Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types string
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+ Union 2
+ Vertex: Union 2
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select * from (select key from src union all select src.key from src left outer join srcpart on src.key = srcpart.key) a where false
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@srcpart
+#### A masked pattern was here ####
+POSTHOOK: query: select * from (select key from src union all select src.key from src left outer join srcpart on src.key = srcpart.key) a where false
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@srcpart
+#### A masked pattern was here ####
+PREHOOK: query: explain extended
+select * from src s1, src s2 where false and s1.value = s2.value
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select * from src s1, src s2 where false and s1.value = s2.value
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_JOIN
+ TOK_TABREF
+ TOK_TABNAME
+ src
+ s1
+ TOK_TABREF
+ TOK_TABNAME
+ src
+ s2
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_ALLCOLREF
+ TOK_WHERE
+ and
+ false
+ =
+ .
+ TOK_TABLE_OR_COL
+ s1
+ value
+ .
+ TOK_TABLE_OR_COL
+ s2
+ value
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: s1
+ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+ GatherStats: false
+ Filter Operator
+ isSamplingPred: false
+ predicate: false (type: boolean)
+ Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: value (type: string)
+ sort order: +
+ Map-reduce partition columns: value (type: string)
+ Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+ tag: 0
+ value expressions: key (type: string)
+ auto parallelism: true
+ Path -> Alias:
+ -mr-10003default.src{} [s1]
+ Path -> Partition:
+ -mr-10003default.src{}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.src
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ serialization.ddl struct src { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.src
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ serialization.ddl struct src { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src
+ name: default.src
+ Truncated Path -> Alias:
+ -mr-10003default.src{} [s1]
+ Map 3
+ Map Operator Tree:
+ TableScan
+ alias: s2
+ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+ GatherStats: false
+ Filter Operator
+ isSamplingPred: false
+ predicate: false (type: boolean)
+ Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: value (type: string)
+ sort order: +
+ Map-reduce partition columns: value (type: string)
+ Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+ tag: 1
+ value expressions: key (type: string)
+ auto parallelism: true
+ Path -> Alias:
+ -mr-10004default.src{} [s2]
+ Path -> Partition:
+ -mr-10004default.src{}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.src
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ serialization.ddl struct src { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ COLUMN_STATS_ACCURATE true
+ bucket_count -1
+ columns key,value
+ columns.comments 'default','default'
+ columns.types string:string
+#### A masked pattern was here ####
+ name default.src
+ numFiles 1
+ numRows 500
+ rawDataSize 5312
+ serialization.ddl struct src { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 5812
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src
+ name: default.src
+ Truncated Path -> Alias:
+ -mr-10004default.src{} [s2]
+ Reducer 2
+ Execution mode: llap
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Merge Join Operator
+ condition map:
+ Inner Join 0 to 1
+ keys:
+ 0 value (type: string)
+ 1 value (type: string)
+ outputColumnNames: _col0, _col1, _col5, _col6
+ Position of Big Table: 0
+ Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0,_col1,_col2,_col3
+ columns.types string:string:string:string
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select * from src s1, src s2 where false and s1.value = s2.value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select * from src s1, src s2 where false and s1.value = s2.value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+PREHOOK: query: explain extended
+select count(1) from src where null = 1
+PREHOOK: type: QUERY
+POSTHOOK:
<TRUNCATED>