Posted to commits@hive.apache.org by br...@apache.org on 2014/12/24 15:33:01 UTC

svn commit: r1647794 [3/3] - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/hooks/ test/queries/clientpositive/ test/results/clientpositive/ test/results/clientpositive/tez/

Added: hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.8.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.8.out?rev=1647794&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.8.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.8.out Wed Dec 24 14:33:00 2014
@@ -0,0 +1,1127 @@
+PREHOOK: query: -- list bucketing DML: dynamic partition. multiple skewed columns. merge.
+-- The following explains the merge example used in this test case
+-- DML will generate 2 partitions
+-- ds=2008-04-08/hr=a1
+-- ds=2008-04-08/hr=b1
+-- without merge, each partition has more files
+-- ds=2008-04-08/hr=a1 has 2 files
+-- ds=2008-04-08/hr=b1 has 6 files
+-- with merge, each partition has fewer files
+-- ds=2008-04-08/hr=a1 has 1 file
+-- ds=2008-04-08/hr=b1 has 4 files
+-- The following shows file size and name in each directory
+-- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+-- without merge
+-- 155 000000_0
+-- 155 000001_0
+-- with merge
+-- 254 000000_0
+-- hr=b1/key=103/value=val_103:
+-- without merge
+-- 99 000000_0
+-- 99 000001_0
+-- with merge
+-- 142 000001_0
+-- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+-- without merge
+-- 5181 000000_0
+-- 5181 000001_0
+-- with merge
+-- 5181 000000_0
+-- 5181 000001_0
+-- hr=b1/key=484/value=val_484
+-- without merge
+-- 87 000000_0
+-- 87 000001_0
+-- with merge
+-- 118 000002_0 
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- SORT_QUERY_RESULTS
+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
+-- create a skewed table
+create table list_bucketing_dynamic_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
+    stored as DIRECTORIES
+    STORED AS RCFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@list_bucketing_dynamic_part
+POSTHOOK: query: -- list bucketing DML: dynamic partition. multiple skewed columns. merge.
+-- The following explains the merge example used in this test case
+-- DML will generate 2 partitions
+-- ds=2008-04-08/hr=a1
+-- ds=2008-04-08/hr=b1
+-- without merge, each partition has more files
+-- ds=2008-04-08/hr=a1 has 2 files
+-- ds=2008-04-08/hr=b1 has 6 files
+-- with merge, each partition has fewer files
+-- ds=2008-04-08/hr=a1 has 1 file
+-- ds=2008-04-08/hr=b1 has 4 files
+-- The following shows file size and name in each directory
+-- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+-- without merge
+-- 155 000000_0
+-- 155 000001_0
+-- with merge
+-- 254 000000_0
+-- hr=b1/key=103/value=val_103:
+-- without merge
+-- 99 000000_0
+-- 99 000001_0
+-- with merge
+-- 142 000001_0
+-- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+-- without merge
+-- 5181 000000_0
+-- 5181 000001_0
+-- with merge
+-- 5181 000000_0
+-- 5181 000001_0
+-- hr=b1/key=484/value=val_484
+-- without merge
+-- 87 000000_0
+-- 87 000001_0
+-- with merge
+-- 118 000002_0 
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- SORT_QUERY_RESULTS
+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
+-- create a skewed table
+create table list_bucketing_dynamic_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
+    stored as DIRECTORIES
+    STORED AS RCFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@list_bucketing_dynamic_part
+PREHOOK: query: -- list bucketing DML without merge. use bucketing to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: -- list bucketing DML without merge. use bucketing to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  
+TOK_QUERY
+   TOK_FROM
+      TOK_TABREF
+         TOK_TABNAME
+            srcpart
+   TOK_INSERT
+      TOK_DESTINATION
+         TOK_TAB
+            TOK_TABNAME
+               list_bucketing_dynamic_part
+            TOK_PARTSPEC
+               TOK_PARTVAL
+                  ds
+                  '2008-04-08'
+               TOK_PARTVAL
+                  hr
+      TOK_SELECT
+         TOK_SELEXPR
+            TOK_TABLE_OR_COL
+               key
+         TOK_SELEXPR
+            TOK_TABLE_OR_COL
+               value
+         TOK_SELEXPR
+            TOK_FUNCTION
+               if
+               ==
+                  %
+                     TOK_TABLE_OR_COL
+                        key
+                     100
+                  0
+               'a1'
+               'b1'
+      TOK_WHERE
+         =
+            TOK_TABLE_OR_COL
+               ds
+            '2008-04-08'
+
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: srcpart
+            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string), if(((UDFToDouble(key) % 100.0) = 0.0), 'a1', 'b1') (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Static Partition Specification: ds=2008-04-08/
+                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns key,value
+                      columns.comments 
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.list_bucketing_dynamic_part
+                      partition_columns ds/hr
+                      partition_columns.types string:string
+                      serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.list_bucketing_dynamic_part
+                TotalFiles: 1
+                GatherStats: true
+                MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=11
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=12
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+      Truncated Path -> Alias:
+        /srcpart/ds=2008-04-08/hr=11 [$hdt$_0:srcpart]
+        /srcpart/ds=2008-04-08/hr=12 [$hdt$_0:srcpart]
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_dynamic_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_dynamic_part
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
+POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
+POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- check DML result
+show partitions list_bucketing_dynamic_part
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: query: -- check DML result
+show partitions list_bucketing_dynamic_part
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+ds=2008-04-08/hr=a1
+ds=2008-04-08/hr=b1
+PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, a1]    	 
+Database:           	default             	 
+Table:              	list_bucketing_dynamic_part	 
+#### A masked pattern was here ####
+Protect Mode:       	None                	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	true                
+	numFiles            	2                   
+	numRows             	16                  
+	rawDataSize         	136                 
+	totalSize           	310                 
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key, value]        	 
+Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, b1]    	 
+Database:           	default             	 
+Table:              	list_bucketing_dynamic_part	 
+#### A masked pattern was here ####
+Protect Mode:       	None                	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	true                
+	numFiles            	6                   
+	numRows             	984                 
+	rawDataSize         	9488                
+	totalSize           	10734               
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key, value]        	 
+Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484, [103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: -- list bucketing DML with merge. use bucketing to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: -- list bucketing DML with merge. use bucketing to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  
+TOK_QUERY
+   TOK_FROM
+      TOK_TABREF
+         TOK_TABNAME
+            srcpart
+   TOK_INSERT
+      TOK_DESTINATION
+         TOK_TAB
+            TOK_TABNAME
+               list_bucketing_dynamic_part
+            TOK_PARTSPEC
+               TOK_PARTVAL
+                  ds
+                  '2008-04-08'
+               TOK_PARTVAL
+                  hr
+      TOK_SELECT
+         TOK_SELEXPR
+            TOK_TABLE_OR_COL
+               key
+         TOK_SELEXPR
+            TOK_TABLE_OR_COL
+               value
+         TOK_SELEXPR
+            TOK_FUNCTION
+               if
+               ==
+                  %
+                     TOK_TABLE_OR_COL
+                        key
+                     100
+                  0
+               'a1'
+               'b1'
+      TOK_WHERE
+         =
+            TOK_TABLE_OR_COL
+               ds
+            '2008-04-08'
+
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
+  Stage-4
+  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
+  Stage-2 depends on stages: Stage-0
+  Stage-3
+  Stage-5
+  Stage-6 depends on stages: Stage-5
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: srcpart
+            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string), if(((UDFToDouble(key) % 100.0) = 0.0), 'a1', 'b1') (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Static Partition Specification: ds=2008-04-08/
+                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns key,value
+                      columns.comments 
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.list_bucketing_dynamic_part
+                      partition_columns hr
+                      partition_columns.types string:string
+                      serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.list_bucketing_dynamic_part
+                TotalFiles: 1
+                GatherStats: true
+                MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=11
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=12
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+      Truncated Path -> Alias:
+        /srcpart/ds=2008-04-08/hr=11 [$hdt$_0:srcpart]
+        /srcpart/ds=2008-04-08/hr=12 [$hdt$_0:srcpart]
+
+  Stage: Stage-7
+    Conditional Operator
+
+  Stage: Stage-4
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_dynamic_part
+                partition_columns hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_dynamic_part
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+  Stage: Stage-3
+    Merge File Operator
+      Map Operator Tree:
+          RCFile Merge Operator
+      merge level: block
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_dynamic_part
+              partition_columns hr
+              partition_columns.types string:string
+              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_dynamic_part
+                partition_columns hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_dynamic_part
+            name: default.list_bucketing_dynamic_part
+      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+      Truncated Path -> Alias:
+#### A masked pattern was here ####
+
+  Stage: Stage-5
+    Merge File Operator
+      Map Operator Tree:
+          RCFile Merge Operator
+      merge level: block
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_dynamic_part
+              partition_columns hr
+              partition_columns.types string:string
+              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_dynamic_part
+                partition_columns hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_dynamic_part
+            name: default.list_bucketing_dynamic_part
+      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+      Truncated Path -> Alias:
+#### A masked pattern was here ####
+
+  Stage: Stage-6
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
+POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
+POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- check DML result
+show partitions list_bucketing_dynamic_part
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: query: -- check DML result
+show partitions list_bucketing_dynamic_part
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+ds=2008-04-08/hr=a1
+ds=2008-04-08/hr=b1
+PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, a1]    	 
+Database:           	default             	 
+Table:              	list_bucketing_dynamic_part	 
+#### A masked pattern was here ####
+Protect Mode:       	None                	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	true                
+	numFiles            	1                   
+	numRows             	16                  
+	rawDataSize         	136                 
+	totalSize           	254                 
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key, value]        	 
+Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, b1]    	 
+Database:           	default             	 
+Table:              	list_bucketing_dynamic_part	 
+#### A masked pattern was here ####
+Protect Mode:       	None                	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	true                
+	numFiles            	4                   
+	numRows             	984                 
+	rawDataSize         	9488                
+	totalSize           	10622               
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key, value]        	 
+Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484, [103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+1000
+PREHOOK: query: select count(*) from list_bucketing_dynamic_part
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_dynamic_part
+PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
+PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from list_bucketing_dynamic_part
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
+POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
+#### A masked pattern was here ####
+1000
+PREHOOK: query: explain extended
+select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  
+TOK_QUERY
+   TOK_FROM
+      TOK_TABREF
+         TOK_TABNAME
+            list_bucketing_dynamic_part
+   TOK_INSERT
+      TOK_DESTINATION
+         TOK_DIR
+            TOK_TMP_FILE
+      TOK_SELECT
+         TOK_SELEXPR
+            TOK_ALLCOLREF
+      TOK_WHERE
+         and
+            =
+               TOK_TABLE_OR_COL
+                  key
+               '484'
+            =
+               TOK_TABLE_OR_COL
+                  value
+               'val_484'
+
+
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Partition Description:
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr a1
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_dynamic_part
+              numFiles 1
+              numRows 16
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 136
+              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              totalSize 254
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_dynamic_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_dynamic_part
+            name: default.list_bucketing_dynamic_part
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr b1
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_dynamic_part
+              numFiles 4
+              numRows 984
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 9488
+              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              totalSize 10622
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_dynamic_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_dynamic_part
+            name: default.list_bucketing_dynamic_part
+      Processor Tree:
+        TableScan
+          alias: list_bucketing_dynamic_part
+          Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
+          GatherStats: false
+          Filter Operator
+            isSamplingPred: false
+            predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
+            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
+              ListSink
+
+PREHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_dynamic_part
+PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
+PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
+POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
+#### A masked pattern was here ####
+484	val_484	2008-04-08	b1
+484	val_484	2008-04-08	b1
+PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+484	val_484	2008-04-08	11
+484	val_484	2008-04-08	12
+PREHOOK: query: -- clean up
+drop table list_bucketing_dynamic_part
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@list_bucketing_dynamic_part
+PREHOOK: Output: default@list_bucketing_dynamic_part
+POSTHOOK: query: -- clean up
+drop table list_bucketing_dynamic_part
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: Output: default@list_bucketing_dynamic_part

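For context, the golden file above covers dynamic-partition list-bucketing DML with small-file merge. A minimal HiveQL sketch of the driving scenario follows; the set commands are assumptions about the corresponding list_bucket_dml_6.q source, which is not included in this commit hunk:

    -- assumed session settings (not shown in this diff):
    -- subdirectory support is required for skewed values stored as directories
    set hive.mapred.supports.subdirectories=true;
    set hive.exec.dynamic.partition=true;
    -- hr is resolved dynamically, so nonstrict mode is needed
    set hive.exec.dynamic.partition.mode=nonstrict;
    -- enable small-file merge for map-only and map-reduce outputs
    set hive.merge.mapfiles=true;
    set hive.merge.mapredfiles=true;

    insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08';

With merge enabled, the conditional merge stages shown in the plan above (Stage-3 and Stage-5, RCFile block-level merge) concatenate the per-directory outputs, which is why hr=a1 drops from 2 files to 1 and hr=b1 from 6 to 4.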
Added: hive/trunk/ql/src/test/results/clientpositive/plan_json.q.java1.7.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/plan_json.q.java1.7.out?rev=1647794&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/plan_json.q.java1.7.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/plan_json.q.java1.7.out Wed Dec 24 14:33:00 2014
@@ -0,0 +1,13 @@
+PREHOOK: query: -- explain plan json: the query gets the formatted JSON output of the query plan for the Hive query
+
+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
+EXPLAIN FORMATTED SELECT count(1) FROM src
+PREHOOK: type: QUERY
+POSTHOOK: query: -- explain plan json: the query gets the formatted JSON output of the query plan for the Hive query
+
+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
+EXPLAIN FORMATTED SELECT count(1) FROM src
+POSTHOOK: type: QUERY
+{"STAGE PLANS":{"Stage-1":{"Map Reduce":{"Reduce Operator Tree:":{"Group By Operator":{"mode:":"mergepartial","aggregations:":["count(VALUE._col0)"],"outputColumnNames:":["_col0"],"children":{"Select Operator":{"expressions:":"_col0 (type: bigint)","outputColumnNames:":["_col0"],"children":{"File Output Operator":{"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","compressed:":"false","table:":{"serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe","input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"}}},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE"}},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE"}},"Map Operator Tree:":[{"TableScan":{"alias:":"src","children":{"Select Operator":{"expressions:":"1 (type: int)","outputColumnNames:":["_col0"],"children":{"Group By Operator":{"mode:":
 "hash","aggregations:":["count(_col0)"],"outputColumnNames:":["_col0"],"children":{"Reduce Output Operator":{"sort order:":"","value expressions:":"_col0 (type: bigint)","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE"}},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE"}},"Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE"}},"Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE"}}]}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{}}}}},"STAGE DEPENDENCIES":{"Stage-1":{"ROOT STAGE":"TRUE"},"Stage-0":{"DEPENDENT STAGES":"Stage-1"}}}

Added: hive/trunk/ql/src/test/results/clientpositive/plan_json.q.java1.8.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/plan_json.q.java1.8.out?rev=1647794&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/plan_json.q.java1.8.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/plan_json.q.java1.8.out Wed Dec 24 14:33:00 2014
@@ -0,0 +1,13 @@
+PREHOOK: query: -- explain plan json: the query gets the formatted JSON output of the query plan for the Hive query
+
+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
+EXPLAIN FORMATTED SELECT count(1) FROM src
+PREHOOK: type: QUERY
+POSTHOOK: query: -- explain plan json: the query gets the formatted JSON output of the query plan for the Hive query
+
+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
+EXPLAIN FORMATTED SELECT count(1) FROM src
+POSTHOOK: type: QUERY
+{"STAGE DEPENDENCIES":{"Stage-1":{"ROOT STAGE":"TRUE"},"Stage-0":{"DEPENDENT STAGES":"Stage-1"}},"STAGE PLANS":{"Stage-1":{"Map Reduce":{"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","children":{"Select Operator":{"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","table:":{"input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"}}},"outputColumnNames:":["_col0"],"expressions:":"_col0 (type: bigint)"}},"outputColumnNames:":["_col0"]}},"Map Operator Tree:":[{"TableScan":{"alias:":"src","Statistics:":"Num rows: 500 Data size
 : 5312 Basic stats: COMPLETE Column stats: COMPLETE","children":{"Select Operator":{"Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE","children":{"Group By Operator":{"aggregations:":["count(_col0)"],"mode:":"hash","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","children":{"Reduce Output Operator":{"value expressions:":"_col0 (type: bigint)","sort order:":"","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE"}},"outputColumnNames:":["_col0"]}},"outputColumnNames:":["_col0"],"expressions:":"1 (type: int)"}}}}]}},"Stage-0":{"Fetch Operator":{"Processor Tree:":{"ListSink":{}},"limit:":"-1"}}}}

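The two plan_json golden files above contain the same plan; they differ only in the order of JSON keys, presumably because EXPLAIN FORMATTED serializes the plan through Java maps whose iteration order changed between JDK 7 and JDK 8 (hence the JAVA_VERSION_SPECIFIC_OUTPUT marker and the paired .java1.7.out/.java1.8.out files). A minimal usage sketch, assuming the standard src test table:

    -- emits the whole query plan as a single JSON object on stdout;
    -- key order follows the JVM's map iteration order
    EXPLAIN FORMATTED SELECT count(1) FROM src;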
Added: hive/trunk/ql/src/test/results/clientpositive/tez/join0.q.java1.7.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/join0.q.java1.7.out?rev=1647794&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/join0.q.java1.7.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/join0.q.java1.7.out Wed Dec 24 14:33:00 2014
@@ -0,0 +1,236 @@
+Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
+
+EXPLAIN
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+PREHOOK: type: QUERY
+POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
+
+EXPLAIN
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (key < 10) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: string), _col1 (type: string)
+        Map 4 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (key < 10) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: string), _col1 (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
+                  sort order: ++++
+                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+        Reducer 3 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
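[Editor's note: in the plan above, the Merge Join Operator in Reducer 2 lists empty key expressions for both inputs (the bare "0" and "1" under "keys:"), so every row from Map 1 pairs with every row from Map 4; that is what triggers the cross-product warning. Reducer 3 then applies the SORT BY. A minimal Python sketch of the same dataflow, with an illustrative subset of rows (not a Hive API):

    from itertools import product

    # Rows surviving the (key < 10) filter on each side of the join.
    src1 = [("0", "val_0"), ("2", "val_2"), ("4", "val_4")]
    src2 = [("0", "val_0"), ("2", "val_2"), ("4", "val_4")]

    # Reducer 2: a join with no key expressions degenerates to a
    # Cartesian product of its two inputs.
    rows = [(k1, v1, k2, v2) for (k1, v1), (k2, v2) in product(src1, src2)]

    # Reducer 3: SORT BY k1, v1, k2, v2.
    rows.sort()
    for r in rows:
        print(r)
]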
+Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: EXPLAIN FORMATTED
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN FORMATTED
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+POSTHOOK: type: QUERY
+#### A masked pattern was here ####
+Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	2	val_2
+0	val_0	2	val_2
+0	val_0	2	val_2
+0	val_0	4	val_4
+0	val_0	4	val_4
+0	val_0	4	val_4
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	8	val_8
+0	val_0	8	val_8
+0	val_0	8	val_8
+0	val_0	9	val_9
+0	val_0	9	val_9
+0	val_0	9	val_9
+2	val_2	0	val_0
+2	val_2	0	val_0
+2	val_2	0	val_0
+2	val_2	2	val_2
+2	val_2	4	val_4
+2	val_2	5	val_5
+2	val_2	5	val_5
+2	val_2	5	val_5
+2	val_2	8	val_8
+2	val_2	9	val_9
+4	val_4	0	val_0
+4	val_4	0	val_0
+4	val_4	0	val_0
+4	val_4	2	val_2
+4	val_4	4	val_4
+4	val_4	5	val_5
+4	val_4	5	val_5
+4	val_4	5	val_5
+4	val_4	8	val_8
+4	val_4	9	val_9
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	2	val_2
+5	val_5	2	val_2
+5	val_5	2	val_2
+5	val_5	4	val_4
+5	val_5	4	val_4
+5	val_5	4	val_4
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	8	val_8
+5	val_5	8	val_8
+5	val_5	8	val_8
+5	val_5	9	val_9
+5	val_5	9	val_9
+5	val_5	9	val_9
+8	val_8	0	val_0
+8	val_8	0	val_0
+8	val_8	0	val_0
+8	val_8	2	val_2
+8	val_8	4	val_4
+8	val_8	5	val_5
+8	val_8	5	val_5
+8	val_8	5	val_5
+8	val_8	8	val_8
+8	val_8	9	val_9
+9	val_9	0	val_0
+9	val_9	0	val_0
+9	val_9	0	val_0
+9	val_9	2	val_2
+9	val_9	4	val_4
+9	val_9	5	val_5
+9	val_9	5	val_5
+9	val_9	5	val_5
+9	val_9	8	val_8
+9	val_9	9	val_9
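[Editor's note: a sanity check on the result set above: ten rows of src satisfy key < 10 (keys 0, 0, 0, 2, 4, 5, 5, 5, 8, 9), so the keyless self-join must return 10 x 10 = 100 rows, which matches the 100 lines printed. In Python:

    # Keys of the ten src rows with key < 10, read off the output above.
    keys = ["0", "0", "0", "2", "4", "5", "5", "5", "8", "9"]

    # A keyless join of this row set with itself yields 10 * 10 = 100 rows.
    assert len(keys) ** 2 == 100
]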

Added: hive/trunk/ql/src/test/results/clientpositive/tez/join0.q.java1.8.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/join0.q.java1.8.out?rev=1647794&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/join0.q.java1.8.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/join0.q.java1.8.out Wed Dec 24 14:33:00 2014
@@ -0,0 +1,236 @@
+Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
+
+EXPLAIN
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+PREHOOK: type: QUERY
+POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
+
+EXPLAIN
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (key < 10) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: string), _col1 (type: string)
+        Map 4 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (key < 10) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: string), _col1 (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
+                  sort order: ++++
+                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+        Reducer 3 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: EXPLAIN FORMATTED
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN FORMATTED
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+POSTHOOK: type: QUERY
+#### A masked pattern was here ####
+Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	2	val_2
+0	val_0	2	val_2
+0	val_0	2	val_2
+0	val_0	4	val_4
+0	val_0	4	val_4
+0	val_0	4	val_4
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	8	val_8
+0	val_0	8	val_8
+0	val_0	8	val_8
+0	val_0	9	val_9
+0	val_0	9	val_9
+0	val_0	9	val_9
+2	val_2	0	val_0
+2	val_2	0	val_0
+2	val_2	0	val_0
+2	val_2	2	val_2
+2	val_2	4	val_4
+2	val_2	5	val_5
+2	val_2	5	val_5
+2	val_2	5	val_5
+2	val_2	8	val_8
+2	val_2	9	val_9
+4	val_4	0	val_0
+4	val_4	0	val_0
+4	val_4	0	val_0
+4	val_4	2	val_2
+4	val_4	4	val_4
+4	val_4	5	val_5
+4	val_4	5	val_5
+4	val_4	5	val_5
+4	val_4	8	val_8
+4	val_4	9	val_9
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	2	val_2
+5	val_5	2	val_2
+5	val_5	2	val_2
+5	val_5	4	val_4
+5	val_5	4	val_4
+5	val_5	4	val_4
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	8	val_8
+5	val_5	8	val_8
+5	val_5	8	val_8
+5	val_5	9	val_9
+5	val_5	9	val_9
+5	val_5	9	val_9
+8	val_8	0	val_0
+8	val_8	0	val_0
+8	val_8	0	val_0
+8	val_8	2	val_2
+8	val_8	4	val_4
+8	val_8	5	val_5
+8	val_8	5	val_5
+8	val_8	5	val_5
+8	val_8	8	val_8
+8	val_8	9	val_9
+9	val_9	0	val_0
+9	val_9	0	val_0
+9	val_9	0	val_0
+9	val_9	2	val_2
+9	val_9	4	val_4
+9	val_9	5	val_5
+9	val_9	5	val_5
+9	val_9	5	val_5
+9	val_9	8	val_8
+9	val_9	9	val_9
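
[Editor's note: because join0.q is tagged JAVA_VERSION_SPECIFIC_OUTPUT, the commit keeps separate golden files per JDK even though, as shown above, the java1.7 and java1.8 outputs are identical here. A quick way to check that claim from a Hive checkout (paths as in the commit; running from the repo root is an assumption):

    import filecmp

    # Byte-for-byte comparison of the two golden files added by r1647794.
    print(filecmp.cmp(
        "ql/src/test/results/clientpositive/tez/join0.q.java1.7.out",
        "ql/src/test/results/clientpositive/tez/join0.q.java1.8.out",
        shallow=False))
]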