Posted to commits@hive.apache.org by se...@apache.org on 2015/09/18 01:48:58 UTC
[3/3] hive git commit: HIVE-11851 : LLAP: Test failures - file.splitmetainfo file not found (Sergey Shelukhin) ADDENDUM
HIVE-11851 : LLAP: Test failures - file.splitmetainfo file not found (Sergey Shelukhin) ADDENDUM
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f2e12437
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f2e12437
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f2e12437
Branch: refs/heads/llap
Commit: f2e124370823693889dc5296170d5e5cb55177e9
Parents: 52a934f
Author: Sergey Shelukhin <se...@apache.org>
Authored: Thu Sep 17 16:49:39 2015 -0700
Committer: Sergey Shelukhin <se...@apache.org>
Committed: Thu Sep 17 16:49:39 2015 -0700
----------------------------------------------------------------------
.../clientpositive/llap/metadataonly1.q.out | 2050 +++++++++++++++++
.../clientpositive/llap/optimize_nullscan.q.out | 2159 ++++++++++++++++++
.../llap/vector_decimal_precision.q.out | 672 ++++++
3 files changed, 4881 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/f2e12437/ql/src/test/results/clientpositive/llap/metadataonly1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/metadataonly1.q.out b/ql/src/test/results/clientpositive/llap/metadataonly1.q.out
new file mode 100644
index 0000000..d569a23
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/metadataonly1.q.out
@@ -0,0 +1,2050 @@
+PREHOOK: query: CREATE TABLE TEST1(A INT, B DOUBLE) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@TEST1
+POSTHOOK: query: CREATE TABLE TEST1(A INT, B DOUBLE) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@TEST1
+PREHOOK: query: explain extended select max(ds) from TEST1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select max(ds) from TEST1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ TEST1
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_FUNCTION
+ max
+ TOK_TABLE_OR_COL
+ ds
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: test1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: max(_col0)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ tag: -1
+ value expressions: _col0 (type: string)
+ auto parallelism: false
+ Execution mode: llap
+ Reducer 2
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: max(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types string
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select max(ds) from TEST1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+#### A masked pattern was here ####
+POSTHOOK: query: select max(ds) from TEST1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+#### A masked pattern was here ####
+NULL
+PREHOOK: query: alter table TEST1 add partition (ds='1')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@test1
+POSTHOOK: query: alter table TEST1 add partition (ds='1')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@test1
+POSTHOOK: Output: default@test1@ds=1
+PREHOOK: query: explain extended select max(ds) from TEST1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select max(ds) from TEST1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ TEST1
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_FUNCTION
+ max
+ TOK_TABLE_OR_COL
+ ds
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: test1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: max(_col0)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ tag: -1
+ value expressions: _col0 (type: string)
+ auto parallelism: false
+ Path -> Alias:
+ -mr-10004default.test1{ds=1} [test1]
+ Path -> Partition:
+ -mr-10004default.test1{ds=1}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test1
+ name: default.test1
+ Truncated Path -> Alias:
+ -mr-10004default.test1{ds=1} [test1]
+ Reducer 2
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: max(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types string
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select max(ds) from TEST1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test1@ds=1
+#### A masked pattern was here ####
+POSTHOOK: query: select max(ds) from TEST1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test1@ds=1
+#### A masked pattern was here ####
+1
+PREHOOK: query: explain extended select count(distinct ds) from TEST1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select count(distinct ds) from TEST1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ TEST1
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_FUNCTIONDI
+ count
+ TOK_TABLE_OR_COL
+ ds
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: test1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: count(DISTINCT _col0)
+ keys: _col0 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ tag: -1
+ auto parallelism: false
+ Path -> Alias:
+ -mr-10004default.test1{ds=1} [test1]
+ Path -> Partition:
+ -mr-10004default.test1{ds=1}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test1
+ name: default.test1
+ Truncated Path -> Alias:
+ -mr-10004default.test1{ds=1} [test1]
+ Reducer 2
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(DISTINCT KEY._col0:0._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types bigint
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select count(distinct ds) from TEST1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test1@ds=1
+#### A masked pattern was here ####
+POSTHOOK: query: select count(distinct ds) from TEST1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test1@ds=1
+#### A masked pattern was here ####
+1
+PREHOOK: query: explain extended select count(ds) from TEST1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select count(ds) from TEST1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ TEST1
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_FUNCTION
+ count
+ TOK_TABLE_OR_COL
+ ds
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: test1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: count(_col0)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ tag: -1
+ value expressions: _col0 (type: bigint)
+ auto parallelism: false
+ Execution mode: llap
+ Path -> Alias:
+#### A masked pattern was here ####
+ Path -> Partition:
+#### A masked pattern was here ####
+ Partition
+ base file name: ds=1
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test1
+ name: default.test1
+ Truncated Path -> Alias:
+ /test1/ds=1 [test1]
+ Reducer 2
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types bigint
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select count(ds) from TEST1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test1@ds=1
+#### A masked pattern was here ####
+POSTHOOK: query: select count(ds) from TEST1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test1@ds=1
+#### A masked pattern was here ####
+0
+PREHOOK: query: alter table TEST1 add partition (ds='2')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@test1
+POSTHOOK: query: alter table TEST1 add partition (ds='2')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@test1
+POSTHOOK: Output: default@test1@ds=2
+PREHOOK: query: explain extended
+select count(*) from TEST1 a2 join (select max(ds) m from TEST1) b on a2.ds=b.m
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select count(*) from TEST1 a2 join (select max(ds) m from TEST1) b on a2.ds=b.m
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_JOIN
+ TOK_TABREF
+ TOK_TABNAME
+ TEST1
+ a2
+ TOK_SUBQUERY
+ TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ TEST1
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_FUNCTION
+ max
+ TOK_TABLE_OR_COL
+ ds
+ m
+ b
+ =
+ .
+ TOK_TABLE_OR_COL
+ a2
+ ds
+ .
+ TOK_TABLE_OR_COL
+ b
+ m
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_FUNCTIONSTAR
+ count
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+ Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+ Reducer 5 <- Map 4 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: a2
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ tag: 0
+ auto parallelism: true
+ Execution mode: llap
+ Path -> Alias:
+#### A masked pattern was here ####
+ Path -> Partition:
+#### A masked pattern was here ####
+ Partition
+ base file name: ds=1
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test1
+ name: default.test1
+#### A masked pattern was here ####
+ Partition
+ base file name: ds=2
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test1
+ name: default.test1
+ Truncated Path -> Alias:
+ /test1/ds=1 [a2]
+ /test1/ds=2 [a2]
+ Map 4
+ Map Operator Tree:
+ TableScan
+ alias: a2
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: max(_col0)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ tag: -1
+ value expressions: _col0 (type: string)
+ auto parallelism: false
+ Execution mode: llap
+ Path -> Alias:
+#### A masked pattern was here ####
+ Path -> Partition:
+#### A masked pattern was here ####
+ Partition
+ base file name: ds=1
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test1
+ name: default.test1
+#### A masked pattern was here ####
+ Partition
+ base file name: ds=2
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test1
+ name: default.test1
+ Truncated Path -> Alias:
+ /test1/ds=1 [a2]
+ /test1/ds=2 [a2]
+ Reducer 2
+ Execution mode: llap
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Merge Join Operator
+ condition map:
+ Inner Join 0 to 1
+ keys:
+ 0 _col0 (type: string)
+ 1 _col0 (type: string)
+ Position of Big Table: 0
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: count()
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ tag: -1
+ value expressions: _col0 (type: bigint)
+ auto parallelism: false
+ Reducer 3
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types bigint
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+ Reducer 5
+ Execution mode: llap
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: max(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ isSamplingPred: false
+ predicate: _col0 is not null (type: boolean)
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ tag: 1
+ auto parallelism: true
+ Select Operator
+ expressions: _col0 (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: string)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Dynamic Partitioning Event Operator
+ Target Input: a2
+ Partition key expr: ds
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Target column: ds
+ Target Vertex: Map 1
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select count(*) from TEST1 a2 join (select max(ds) m from TEST1) b on a2.ds=b.m
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test1@ds=1
+PREHOOK: Input: default@test1@ds=2
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from TEST1 a2 join (select max(ds) m from TEST1) b on a2.ds=b.m
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test1@ds=1
+POSTHOOK: Input: default@test1@ds=2
+#### A masked pattern was here ####
+0
+PREHOOK: query: CREATE TABLE TEST2(A INT, B DOUBLE) partitioned by (ds string, hr string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@TEST2
+POSTHOOK: query: CREATE TABLE TEST2(A INT, B DOUBLE) partitioned by (ds string, hr string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@TEST2
+PREHOOK: query: alter table TEST2 add partition (ds='1', hr='1')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@test2
+POSTHOOK: query: alter table TEST2 add partition (ds='1', hr='1')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@test2
+POSTHOOK: Output: default@test2@ds=1/hr=1
+PREHOOK: query: alter table TEST2 add partition (ds='1', hr='2')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@test2
+POSTHOOK: query: alter table TEST2 add partition (ds='1', hr='2')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@test2
+POSTHOOK: Output: default@test2@ds=1/hr=2
+PREHOOK: query: alter table TEST2 add partition (ds='1', hr='3')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@test2
+POSTHOOK: query: alter table TEST2 add partition (ds='1', hr='3')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@test2
+POSTHOOK: Output: default@test2@ds=1/hr=3
+PREHOOK: query: explain extended select ds, count(distinct hr) from TEST2 group by ds
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select ds, count(distinct hr) from TEST2 group by ds
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ TEST2
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_TABLE_OR_COL
+ ds
+ TOK_SELEXPR
+ TOK_FUNCTIONDI
+ count
+ TOK_TABLE_OR_COL
+ hr
+ TOK_GROUPBY
+ TOK_TABLE_OR_COL
+ ds
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: test2
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string), hr (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: count(DISTINCT _col1)
+ keys: _col0 (type: string), _col1 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: string)
+ sort order: ++
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ tag: -1
+ auto parallelism: true
+ Path -> Alias:
+ -mr-10004default.test2{ds=1, hr=1} [test2]
+ -mr-10005default.test2{ds=1, hr=2} [test2]
+ -mr-10006default.test2{ds=1, hr=3} [test2]
+ Path -> Partition:
+ -mr-10004default.test2{ds=1, hr=1}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ hr 1
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+ -mr-10005default.test2{ds=1, hr=2}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ hr 2
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+ -mr-10006default.test2{ds=1, hr=3}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ hr 3
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+ Truncated Path -> Alias:
+ -mr-10004default.test2{ds=1, hr=1} [test2]
+ -mr-10005default.test2{ds=1, hr=2} [test2]
+ -mr-10006default.test2{ds=1, hr=3} [test2]
+ Reducer 2
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(DISTINCT KEY._col1:0._col0)
+ keys: KEY._col0 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0,_col1
+ columns.types string:bigint
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select ds, count(distinct hr) from TEST2 group by ds
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test2
+PREHOOK: Input: default@test2@ds=1/hr=1
+PREHOOK: Input: default@test2@ds=1/hr=2
+PREHOOK: Input: default@test2@ds=1/hr=3
+#### A masked pattern was here ####
+POSTHOOK: query: select ds, count(distinct hr) from TEST2 group by ds
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test2
+POSTHOOK: Input: default@test2@ds=1/hr=1
+POSTHOOK: Input: default@test2@ds=1/hr=2
+POSTHOOK: Input: default@test2@ds=1/hr=3
+#### A masked pattern was here ####
+1 3
+PREHOOK: query: explain extended select ds, count(hr) from TEST2 group by ds
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select ds, count(hr) from TEST2 group by ds
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ TEST2
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_TABLE_OR_COL
+ ds
+ TOK_SELEXPR
+ TOK_FUNCTION
+ count
+ TOK_TABLE_OR_COL
+ hr
+ TOK_GROUPBY
+ TOK_TABLE_OR_COL
+ ds
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: test2
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string), hr (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: count(_col1)
+ keys: _col0 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ tag: -1
+ value expressions: _col1 (type: bigint)
+ auto parallelism: true
+ Execution mode: llap
+ Path -> Alias:
+#### A masked pattern was here ####
+ Path -> Partition:
+#### A masked pattern was here ####
+ Partition
+ base file name: hr=1
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ hr 1
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+#### A masked pattern was here ####
+ Partition
+ base file name: hr=2
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ hr 2
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+#### A masked pattern was here ####
+ Partition
+ base file name: hr=3
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ hr 3
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+ Truncated Path -> Alias:
+ /test2/ds=1/hr=1 [test2]
+ /test2/ds=1/hr=2 [test2]
+ /test2/ds=1/hr=3 [test2]
+ Reducer 2
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ keys: KEY._col0 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0,_col1
+ columns.types string:bigint
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select ds, count(hr) from TEST2 group by ds
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test2
+PREHOOK: Input: default@test2@ds=1/hr=1
+PREHOOK: Input: default@test2@ds=1/hr=2
+PREHOOK: Input: default@test2@ds=1/hr=3
+#### A masked pattern was here ####
+POSTHOOK: query: select ds, count(hr) from TEST2 group by ds
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test2
+POSTHOOK: Input: default@test2@ds=1/hr=1
+POSTHOOK: Input: default@test2@ds=1/hr=2
+POSTHOOK: Input: default@test2@ds=1/hr=3
+#### A masked pattern was here ####
+PREHOOK: query: explain extended select max(ds) from TEST1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select max(ds) from TEST1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ TEST1
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_FUNCTION
+ max
+ TOK_TABLE_OR_COL
+ ds
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: test1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: max(_col0)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ tag: -1
+ value expressions: _col0 (type: string)
+ auto parallelism: false
+ Path -> Alias:
+ -mr-10004default.test1{ds=1} [test1]
+ -mr-10005default.test1{ds=2} [test1]
+ Path -> Partition:
+ -mr-10004default.test1{ds=1}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test1
+ name: default.test1
+ -mr-10005default.test1{ds=2}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test1
+ partition_columns ds
+ partition_columns.types string
+ serialization.ddl struct test1 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test1
+ name: default.test1
+ Truncated Path -> Alias:
+ -mr-10004default.test1{ds=1} [test1]
+ -mr-10005default.test1{ds=2} [test1]
+ Reducer 2
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: max(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types string
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select max(ds) from TEST1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test1@ds=1
+PREHOOK: Input: default@test1@ds=2
+#### A masked pattern was here ####
+POSTHOOK: query: select max(ds) from TEST1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test1@ds=1
+POSTHOOK: Input: default@test1@ds=2
+#### A masked pattern was here ####
+2
+PREHOOK: query: select distinct ds from srcpart
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select distinct ds from srcpart
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+2008-04-08
+2008-04-09
+PREHOOK: query: select min(ds),max(ds) from srcpart
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select min(ds),max(ds) from srcpart
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+2008-04-08 2008-04-09
+PREHOOK: query: -- HIVE-3594 URI encoding for temporary path
+alter table TEST2 add partition (ds='01:10:10', hr='01')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@test2
+POSTHOOK: query: -- HIVE-3594 URI encoding for temporary path
+alter table TEST2 add partition (ds='01:10:10', hr='01')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@test2
+POSTHOOK: Output: default@test2@ds=01%3A10%3A10/hr=01
+PREHOOK: query: alter table TEST2 add partition (ds='01:10:20', hr='02')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@test2
+POSTHOOK: query: alter table TEST2 add partition (ds='01:10:20', hr='02')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@test2
+POSTHOOK: Output: default@test2@ds=01%3A10%3A20/hr=02
+PREHOOK: query: explain extended select ds, count(distinct hr) from TEST2 group by ds
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select ds, count(distinct hr) from TEST2 group by ds
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+
+TOK_QUERY
+ TOK_FROM
+ TOK_TABREF
+ TOK_TABNAME
+ TEST2
+ TOK_INSERT
+ TOK_DESTINATION
+ TOK_DIR
+ TOK_TMP_FILE
+ TOK_SELECT
+ TOK_SELEXPR
+ TOK_TABLE_OR_COL
+ ds
+ TOK_SELEXPR
+ TOK_FUNCTIONDI
+ count
+ TOK_TABLE_OR_COL
+ hr
+ TOK_GROUPBY
+ TOK_TABLE_OR_COL
+ ds
+
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: test2
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ GatherStats: false
+ Select Operator
+ expressions: ds (type: string), hr (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Group By Operator
+ aggregations: count(DISTINCT _col1)
+ keys: _col0 (type: string), _col1 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: string)
+ sort order: ++
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ tag: -1
+ auto parallelism: true
+ Path -> Alias:
+ -mr-10004default.test2{ds=01_10_10, hr=01} [test2]
+ -mr-10005default.test2{ds=01_10_20, hr=02} [test2]
+ -mr-10006default.test2{ds=1, hr=1} [test2]
+ -mr-10007default.test2{ds=1, hr=2} [test2]
+ -mr-10008default.test2{ds=1, hr=3} [test2]
+ Path -> Partition:
+ -mr-10004default.test2{ds=01_10_10, hr=01}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 01:10:10
+ hr 01
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+ -mr-10005default.test2{ds=01_10_20, hr=02}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 01:10:20
+ hr 02
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+ -mr-10006default.test2{ds=1, hr=1}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ hr 1
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+ -mr-10007default.test2{ds=1, hr=2}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ hr 2
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+ -mr-10008default.test2{ds=1, hr=3}
+ Partition
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 1
+ hr 3
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns a,b
+ columns.comments
+ columns.types int:double
+#### A masked pattern was here ####
+ name default.test2
+ partition_columns ds/hr
+ partition_columns.types string:string
+ serialization.ddl struct test2 { i32 a, double b}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.test2
+ name: default.test2
+ Truncated Path -> Alias:
+ -mr-10004default.test2{ds=01_10_10, hr=01} [test2]
+ -mr-10005default.test2{ds=01_10_20, hr=02} [test2]
+ -mr-10006default.test2{ds=1, hr=1} [test2]
+ -mr-10007default.test2{ds=1, hr=2} [test2]
+ -mr-10008default.test2{ds=1, hr=3} [test2]
+ Reducer 2
+ Execution mode: uber
+ Needs Tagging: false
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(DISTINCT KEY._col1:0._col0)
+ keys: KEY._col0 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
+ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0,_col1
+ columns.types string:bigint
+ escape.delim \
+ hive.serialization.extend.additional.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select ds, count(distinct hr) from TEST2 group by ds
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test2
+PREHOOK: Input: default@test2@ds=01%3A10%3A10/hr=01
+PREHOOK: Input: default@test2@ds=01%3A10%3A20/hr=02
+PREHOOK: Input: default@test2@ds=1/hr=1
+PREHOOK: Input: default@test2@ds=1/hr=2
+PREHOOK: Input: default@test2@ds=1/hr=3
+#### A masked pattern was here ####
+POSTHOOK: query: select ds, count(distinct hr) from TEST2 group by ds
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test2
+POSTHOOK: Input: default@test2@ds=01%3A10%3A10/hr=01
+POSTHOOK: Input: default@test2@ds=01%3A10%3A20/hr=02
+POSTHOOK: Input: default@test2@ds=1/hr=1
+POSTHOOK: Input: default@test2@ds=1/hr=2
+POSTHOOK: Input: default@test2@ds=1/hr=3
+#### A masked pattern was here ####
+01:10:10 1
+01:10:20 1
+1 3