Posted to commits@hive.apache.org by na...@apache.org on 2012/12/05 12:59:26 UTC

svn commit: r1417374 [9/11] - in /hive/trunk: common/src/java/org/apache/hadoop/hive/common/ common/src/java/org/apache/hadoop/hive/conf/ conf/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/ha...
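
For context, the two list_bucket_query_multiskew golden files below now correspond to a much simpler test flow. A minimal HiveQL sketch, assembled from the added lines of this diff (the full .q files, session settings, and the masked locations are not shown here):

    -- create a skewed table whose skewed values are stored in subdirectories
    create table fact_daily (key String, value String)
    partitioned by (ds String, hr String)
    skewed by (key, value) on (('484','val_484'),('238','val_238'))
    stored as DIRECTORIES;

    -- load one partition directly, instead of simulating DML with dfs moves
    -- and a later ALTER TABLE ... SKEWED BY / skewed-location setup
    insert overwrite table fact_daily partition (ds = '1', hr = '4')
    select key, value from src;

    -- list bucketing query: the pruner should select only the matching
    -- skewed-value directory, e.g. /fact_daily/ds=1/hr=4/key=484/value=val_484
    explain extended
    select key from fact_daily
    where (ds='1' and hr='4') and (key='484' and value='val_484');

The removed lines show the old setup, which populated hr=1..3 partitions of an (x int, y string) table and then altered it to be skewed; the rewritten tests declare the skew in CREATE TABLE on a (key, value) string schema, which is why the lineage, schema, and statistics lines change throughout.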

Modified: hive/trunk/ql/src/test/results/clientpositive/list_bucket_query_multiskew_1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/list_bucket_query_multiskew_1.q.out?rev=1417374&r1=1417373&r2=1417374&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/list_bucket_query_multiskew_1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/list_bucket_query_multiskew_1.q.out Wed Dec  5 11:59:15 2012
@@ -11,16 +11,11 @@ PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_
 -- 1. pruner only pick up right directory
 -- 2. query result is right
 
--- create 1 table: fact_daily
--- 1. create a few partitions
--- 2. dfs move partition according to list bucketing structure (simulate DML) 
---    $/fact_daily/ds=1/hr=4/x=../y=..
---    notes: waste all partitions except ds=1 and hr=4 for list bucketing query test
--- 3. alter it to skewed table and set up location map
--- 4. list bucketing query
--- fact_daily (ds=1 and hr=4) will be used for list bucketing query	
-CREATE TABLE fact_daily(x int, y STRING) PARTITIONED BY (ds STRING, hr STRING)	
-#### A masked pattern was here ####
+-- create a skewed table
+create table fact_daily (key String, value String) 
+partitioned by (ds String, hr String) 
+skewed by (key, value) on (('484','val_484'),('238','val_238')) 
+stored as DIRECTORIES
 PREHOOK: type: CREATETABLE
 POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)	
 
@@ -35,127 +30,35 @@ POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR
 -- 1. pruner only pick up right directory
 -- 2. query result is right
 
--- create 1 table: fact_daily
--- 1. create a few partitions
--- 2. dfs move partition according to list bucketing structure (simulate DML) 
---    $/fact_daily/ds=1/hr=4/x=../y=..
---    notes: waste all partitions except ds=1 and hr=4 for list bucketing query test
--- 3. alter it to skewed table and set up location map
--- 4. list bucketing query
--- fact_daily (ds=1 and hr=4) will be used for list bucketing query	
-CREATE TABLE fact_daily(x int, y STRING) PARTITIONED BY (ds STRING, hr STRING)	
-#### A masked pattern was here ####
+-- create a skewed table
+create table fact_daily (key String, value String) 
+partitioned by (ds String, hr String) 
+skewed by (key, value) on (('484','val_484'),('238','val_238')) 
+stored as DIRECTORIES
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@fact_daily
-PREHOOK: query: -- create /fact_daily/ds=1/hr=1 directory	
-INSERT OVERWRITE TABLE fact_daily PARTITION (ds='1', hr='1')	
-SELECT key, value FROM src WHERE key=484
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@fact_daily@ds=1/hr=1
-POSTHOOK: query: -- create /fact_daily/ds=1/hr=1 directory	
-INSERT OVERWRITE TABLE fact_daily PARTITION (ds='1', hr='1')	
-SELECT key, value FROM src WHERE key=484
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@fact_daily@ds=1/hr=1
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- create /fact_daily/ds=1/hr=2 directory	
-INSERT OVERWRITE TABLE fact_daily PARTITION (ds='1', hr='2')	
-SELECT key, value FROM src WHERE key=369 or key=406
+PREHOOK: query: insert overwrite table fact_daily partition (ds = '1', hr = '4')
+select key, value from src
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: default@fact_daily@ds=1/hr=2
-POSTHOOK: query: -- create /fact_daily/ds=1/hr=2 directory	
-INSERT OVERWRITE TABLE fact_daily PARTITION (ds='1', hr='2')	
-SELECT key, value FROM src WHERE key=369 or key=406
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@fact_daily@ds=1/hr=2
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- create /fact_daily/ds=1/hr=3 directory	
-INSERT OVERWRITE TABLE fact_daily PARTITION (ds='1', hr='3')	
-SELECT key, value FROM src WHERE key=238
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@fact_daily@ds=1/hr=3
-POSTHOOK: query: -- create /fact_daily/ds=1/hr=3 directory	
-INSERT OVERWRITE TABLE fact_daily PARTITION (ds='1', hr='3')	
-SELECT key, value FROM src WHERE key=238
+PREHOOK: Output: default@fact_daily@ds=1/hr=4
+POSTHOOK: query: insert overwrite table fact_daily partition (ds = '1', hr = '4')
+select key, value from src
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: default@fact_daily@ds=1/hr=3
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-#### A masked pattern was here ####
-PREHOOK: query: -- switch fact_daily to skewed table and point its location to /fact_daily/ds=1
-alter table fact_daily skewed by (x,y) on ((484,'val_484'),(238,'val_238'))
-PREHOOK: type: ALTERTABLE_SKEWED
-PREHOOK: Input: default@fact_daily
-PREHOOK: Output: default@fact_daily
-POSTHOOK: query: -- switch fact_daily to skewed table and point its location to /fact_daily/ds=1
-alter table fact_daily skewed by (x,y) on ((484,'val_484'),(238,'val_238'))
-POSTHOOK: type: ALTERTABLE_SKEWED
-POSTHOOK: Input: default@fact_daily
-POSTHOOK: Output: default@fact_daily
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: ALTER TABLE fact_daily ADD PARTITION (ds='1', hr='4')
-PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: default@fact_daily
-POSTHOOK: query: ALTER TABLE fact_daily ADD PARTITION (ds='1', hr='4')
-POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: default@fact_daily
-POSTHOOK: Output: default@fact_daily@ds=1/hr=4
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- set List Bucketing location map
-#### A masked pattern was here ####
-PREHOOK: type: ALTERTBLPART_SKEWED_LOCATION
-PREHOOK: Input: default@fact_daily
-PREHOOK: Output: default@fact_daily@ds=1/hr=4
-POSTHOOK: query: -- set List Bucketing location map
-#### A masked pattern was here ####
-POSTHOOK: type: ALTERTBLPART_SKEWED_LOCATION
-POSTHOOK: Input: default@fact_daily
-POSTHOOK: Input: default@fact_daily@ds=1/hr=4
 POSTHOOK: Output: default@fact_daily@ds=1/hr=4
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: describe formatted fact_daily PARTITION (ds = '1', hr='4')
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: describe formatted fact_daily PARTITION (ds = '1', hr='4')
 POSTHOOK: type: DESCTABLE
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 # col_name            	data_type           	comment             
 	 	 
-x                   	int                 	None                
-y                   	string              	None                
+key                 	string              	None                
+value               	string              	None                
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
@@ -171,6 +74,10 @@ Table:              	fact_daily         
 Protect Mode:       	None                	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	numFiles            	3                   
+	numRows             	500                 
+	rawDataSize         	5312                
+	totalSize           	5812                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -181,51 +88,36 @@ Compressed:         	No                 
 Num Buckets:        	-1                  	 
 Bucket Columns:     	[]                  	 
 Sort Columns:       	[]                  	 
-Skewed Columns:     	[x, y]              	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key, value]        	 
 Skewed Values:      	[[484, val_484], [238, val_238]]	 
 #### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[484, val_484]=/fact_daily/ds=1/hr=4/key=484/value=val_484, [238, val_238]=/fact_daily/ds=1/hr=4/key=238/value=val_238}	 
 Storage Desc Params:	 	 
 	serialization.format	1                   
-PREHOOK: query: SELECT * FROM fact_daily WHERE ds='1' and hr='4'
+PREHOOK: query: SELECT count(1) FROM fact_daily WHERE ds='1' and hr='4'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@fact_daily@ds=1/hr=4
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM fact_daily WHERE ds='1' and hr='4'
+POSTHOOK: query: SELECT count(1) FROM fact_daily WHERE ds='1' and hr='4'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@fact_daily@ds=1/hr=4
 #### A masked pattern was here ####
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-369	val_369	1	4
-406	val_406	1	4
-369	val_369	1	4
-369	val_369	1	4
-406	val_406	1	4
-406	val_406	1	4
-406	val_406	1	4
-238	val_238	1	4
-238	val_238	1	4
-484	val_484	1	4
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+500
 PREHOOK: query: -- pruner only pick up skewed-value directory
 -- explain plan shows which directory selected: Truncated Path -> Alias
-explain extended SELECT x FROM fact_daily WHERE ( ds='1' and hr='4') and (x=484 and y= 'val_484')
+explain extended SELECT key FROM fact_daily WHERE ( ds='1' and hr='4') and (key='484' and value= 'val_484')
 PREHOOK: type: QUERY
 POSTHOOK: query: -- pruner only pick up skewed-value directory
 -- explain plan shows which directory selected: Truncated Path -> Alias
-explain extended SELECT x FROM fact_daily WHERE ( ds='1' and hr='4') and (x=484 and y= 'val_484')
+explain extended SELECT key FROM fact_daily WHERE ( ds='1' and hr='4') and (key='484' and value= 'val_484')
 POSTHOOK: type: QUERY
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME fact_daily))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL x))) (TOK_WHERE (and (and (= (TOK_TABLE_OR_COL ds) '1') (= (TOK_TABLE_OR_COL hr) '4')) (and (= (TOK_TABLE_OR_COL x) 484) (= (TOK_TABLE_OR_COL y) 'val_484'))))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME fact_daily))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key))) (TOK_WHERE (and (and (= (TOK_TABLE_OR_COL ds) '1') (= (TOK_TABLE_OR_COL hr) '4')) (and (= (TOK_TABLE_OR_COL key) '484') (= (TOK_TABLE_OR_COL value) 'val_484'))))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -242,12 +134,12 @@ STAGE PLANS:
             Filter Operator
               isSamplingPred: false
               predicate:
-                  expr: ((x = 484) and (y = 'val_484'))
+                  expr: ((key = '484') and (value = 'val_484'))
                   type: boolean
               Select Operator
                 expressions:
-                      expr: x
-                      type: int
+                      expr: key
+                      type: string
                 outputColumnNames: _col0
                 File Output Operator
                   compressed: false
@@ -260,7 +152,7 @@ STAGE PLANS:
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
                         columns _col0
-                        columns.types int
+                        columns.types string
                         escape.delim \
                         serialization.format 1
                   TotalFiles: 1
@@ -272,7 +164,7 @@ STAGE PLANS:
       Path -> Partition:
 #### A masked pattern was here ####
           Partition
-            base file name: y=val_484
+            base file name: value=val_484
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             partition values:
@@ -280,19 +172,19 @@ STAGE PLANS:
               hr 4
             properties:
               bucket_count -1
-              columns x,y
-              columns.types int:string
+              columns key,value
+              columns.types string:string
 #### A masked pattern was here ####
               name default.fact_daily
               numFiles 3
-              numPartitions 3
-              numRows 10
+              numPartitions 1
+              numRows 500
               partition_columns ds/hr
-              rawDataSize 110
-              serialization.ddl struct fact_daily { i32 x, string y}
+              rawDataSize 5312
+              serialization.ddl struct fact_daily { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 120
+              totalSize 5812
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
@@ -300,25 +192,25 @@ STAGE PLANS:
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
-                columns x,y
-                columns.types int:string
+                columns key,value
+                columns.types string:string
 #### A masked pattern was here ####
                 name default.fact_daily
                 numFiles 3
-                numPartitions 3
-                numRows 10
+                numPartitions 1
+                numRows 500
                 partition_columns ds/hr
-                rawDataSize 110
-                serialization.ddl struct fact_daily { i32 x, string y}
+                rawDataSize 5312
+                serialization.ddl struct fact_daily { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 120
+                totalSize 5812
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.fact_daily
             name: default.fact_daily
       Truncated Path -> Alias:
-        /fact_daily/ds=1/hr=4/x=484/y=val_484 [fact_daily]
+        /fact_daily/ds=1/hr=4/key=484/value=val_484 [fact_daily]
 
   Stage: Stage-0
     Fetch Operator
@@ -326,38 +218,30 @@ STAGE PLANS:
 
 
 PREHOOK: query: -- List Bucketing Query
-SELECT x FROM fact_daily WHERE ( ds='1' and hr='4') and (x=484 and y= 'val_484')
+SELECT key FROM fact_daily WHERE ( ds='1' and hr='4') and (key='484' and value= 'val_484')
 PREHOOK: type: QUERY
 PREHOOK: Input: default@fact_daily@ds=1/hr=4
 #### A masked pattern was here ####
 POSTHOOK: query: -- List Bucketing Query
-SELECT x FROM fact_daily WHERE ( ds='1' and hr='4') and (x=484 and y= 'val_484')
+SELECT key FROM fact_daily WHERE ( ds='1' and hr='4') and (key='484' and value= 'val_484')
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@fact_daily@ds=1/hr=4
 #### A masked pattern was here ####
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 484
 PREHOOK: query: -- pruner only pick up skewed-value directory
 -- explain plan shows which directory selected: Truncated Path -> Alias
-explain extended SELECT x,y FROM fact_daily WHERE ( ds='1' and hr='4') and (x=238 and y= 'val_238')
+explain extended SELECT key,value FROM fact_daily WHERE ( ds='1' and hr='4') and (key='238' and value= 'val_238')
 PREHOOK: type: QUERY
 POSTHOOK: query: -- pruner only pick up skewed-value directory
 -- explain plan shows which directory selected: Truncated Path -> Alias
-explain extended SELECT x,y FROM fact_daily WHERE ( ds='1' and hr='4') and (x=238 and y= 'val_238')
+explain extended SELECT key,value FROM fact_daily WHERE ( ds='1' and hr='4') and (key='238' and value= 'val_238')
 POSTHOOK: type: QUERY
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME fact_daily))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL x)) (TOK_SELEXPR (TOK_TABLE_OR_COL y))) (TOK_WHERE (and (and (= (TOK_TABLE_OR_COL ds) '1') (= (TOK_TABLE_OR_COL hr) '4')) (and (= (TOK_TABLE_OR_COL x) 238) (= (TOK_TABLE_OR_COL y) 'val_238'))))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME fact_daily))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (and (and (= (TOK_TABLE_OR_COL ds) '1') (= (TOK_TABLE_OR_COL hr) '4')) (and (= (TOK_TABLE_OR_COL key) '238') (= (TOK_TABLE_OR_COL value) 'val_238'))))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -374,13 +258,13 @@ STAGE PLANS:
             Filter Operator
               isSamplingPred: false
               predicate:
-                  expr: ((x = 238) and (y = 'val_238'))
+                  expr: ((key = '238') and (value = 'val_238'))
                   type: boolean
               Select Operator
                 expressions:
-                      expr: x
-                      type: int
-                      expr: y
+                      expr: key
+                      type: string
+                      expr: value
                       type: string
                 outputColumnNames: _col0, _col1
                 File Output Operator
@@ -394,7 +278,7 @@ STAGE PLANS:
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
                         columns _col0,_col1
-                        columns.types int:string
+                        columns.types string:string
                         escape.delim \
                         serialization.format 1
                   TotalFiles: 1
@@ -406,7 +290,7 @@ STAGE PLANS:
       Path -> Partition:
 #### A masked pattern was here ####
           Partition
-            base file name: y=val_238
+            base file name: value=val_238
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             partition values:
@@ -414,19 +298,19 @@ STAGE PLANS:
               hr 4
             properties:
               bucket_count -1
-              columns x,y
-              columns.types int:string
+              columns key,value
+              columns.types string:string
 #### A masked pattern was here ####
               name default.fact_daily
               numFiles 3
-              numPartitions 3
-              numRows 10
+              numPartitions 1
+              numRows 500
               partition_columns ds/hr
-              rawDataSize 110
-              serialization.ddl struct fact_daily { i32 x, string y}
+              rawDataSize 5312
+              serialization.ddl struct fact_daily { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 120
+              totalSize 5812
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
@@ -434,25 +318,25 @@ STAGE PLANS:
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
-                columns x,y
-                columns.types int:string
+                columns key,value
+                columns.types string:string
 #### A masked pattern was here ####
                 name default.fact_daily
                 numFiles 3
-                numPartitions 3
-                numRows 10
+                numPartitions 1
+                numRows 500
                 partition_columns ds/hr
-                rawDataSize 110
-                serialization.ddl struct fact_daily { i32 x, string y}
+                rawDataSize 5312
+                serialization.ddl struct fact_daily { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 120
+                totalSize 5812
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.fact_daily
             name: default.fact_daily
       Truncated Path -> Alias:
-        /fact_daily/ds=1/hr=4/x=238/y=val_238 [fact_daily]
+        /fact_daily/ds=1/hr=4/key=238/value=val_238 [fact_daily]
 
   Stage: Stage-0
     Fetch Operator
@@ -460,39 +344,31 @@ STAGE PLANS:
 
 
 PREHOOK: query: -- List Bucketing Query
-SELECT x,y FROM fact_daily WHERE ( ds='1' and hr='4') and (x=238 and y= 'val_238')
+SELECT key,value FROM fact_daily WHERE ( ds='1' and hr='4') and (key='238' and value= 'val_238')
 PREHOOK: type: QUERY
 PREHOOK: Input: default@fact_daily@ds=1/hr=4
 #### A masked pattern was here ####
 POSTHOOK: query: -- List Bucketing Query
-SELECT x,y FROM fact_daily WHERE ( ds='1' and hr='4') and (x=238 and y= 'val_238')
+SELECT key,value FROM fact_daily WHERE ( ds='1' and hr='4') and (key='238' and value= 'val_238')
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@fact_daily@ds=1/hr=4
 #### A masked pattern was here ####
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 238	val_238
 238	val_238
 PREHOOK: query: -- pruner only pick up default directory
 -- explain plan shows which directory selected: Truncated Path -> Alias
-explain extended SELECT x FROM fact_daily WHERE ( ds='1' and hr='4') and (y = "3")
+explain extended SELECT key FROM fact_daily WHERE ( ds='1' and hr='4') and (value = "3")
 PREHOOK: type: QUERY
 POSTHOOK: query: -- pruner only pick up default directory
 -- explain plan shows which directory selected: Truncated Path -> Alias
-explain extended SELECT x FROM fact_daily WHERE ( ds='1' and hr='4') and (y = "3")
+explain extended SELECT key FROM fact_daily WHERE ( ds='1' and hr='4') and (value = "3")
 POSTHOOK: type: QUERY
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME fact_daily))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL x))) (TOK_WHERE (and (and (= (TOK_TABLE_OR_COL ds) '1') (= (TOK_TABLE_OR_COL hr) '4')) (= (TOK_TABLE_OR_COL y) "3")))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME fact_daily))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key))) (TOK_WHERE (and (and (= (TOK_TABLE_OR_COL ds) '1') (= (TOK_TABLE_OR_COL hr) '4')) (= (TOK_TABLE_OR_COL value) "3")))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -509,12 +385,12 @@ STAGE PLANS:
             Filter Operator
               isSamplingPred: false
               predicate:
-                  expr: (y = '3')
+                  expr: (value = '3')
                   type: boolean
               Select Operator
                 expressions:
-                      expr: x
-                      type: int
+                      expr: key
+                      type: string
                 outputColumnNames: _col0
                 File Output Operator
                   compressed: false
@@ -527,7 +403,7 @@ STAGE PLANS:
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
                         columns _col0
-                        columns.types int
+                        columns.types string
                         escape.delim \
                         serialization.format 1
                   TotalFiles: 1
@@ -547,19 +423,19 @@ STAGE PLANS:
               hr 4
             properties:
               bucket_count -1
-              columns x,y
-              columns.types int:string
+              columns key,value
+              columns.types string:string
 #### A masked pattern was here ####
               name default.fact_daily
               numFiles 3
-              numPartitions 3
-              numRows 10
+              numPartitions 1
+              numRows 500
               partition_columns ds/hr
-              rawDataSize 110
-              serialization.ddl struct fact_daily { i32 x, string y}
+              rawDataSize 5312
+              serialization.ddl struct fact_daily { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 120
+              totalSize 5812
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
@@ -567,19 +443,19 @@ STAGE PLANS:
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
-                columns x,y
-                columns.types int:string
+                columns key,value
+                columns.types string:string
 #### A masked pattern was here ####
                 name default.fact_daily
                 numFiles 3
-                numPartitions 3
-                numRows 10
+                numPartitions 1
+                numRows 500
                 partition_columns ds/hr
-                rawDataSize 110
-                serialization.ddl struct fact_daily { i32 x, string y}
+                rawDataSize 5312
+                serialization.ddl struct fact_daily { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 120
+                totalSize 5812
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.fact_daily
@@ -593,37 +469,29 @@ STAGE PLANS:
 
 
 PREHOOK: query: -- List Bucketing Query
-SELECT x FROM fact_daily WHERE ( ds='1' and hr='4') and (y = "3")
+SELECT key FROM fact_daily WHERE ( ds='1' and hr='4') and (value = "3")
 PREHOOK: type: QUERY
 PREHOOK: Input: default@fact_daily@ds=1/hr=4
 #### A masked pattern was here ####
 POSTHOOK: query: -- List Bucketing Query
-SELECT x FROM fact_daily WHERE ( ds='1' and hr='4') and (y = "3")
+SELECT key FROM fact_daily WHERE ( ds='1' and hr='4') and (value = "3")
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@fact_daily@ds=1/hr=4
 #### A masked pattern was here ####
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: -- pruner only pick up default directory
 -- explain plan shows which directory selected: Truncated Path -> Alias
-explain extended SELECT x,y FROM fact_daily WHERE ( ds='1' and hr='4') and x = 495
+explain extended SELECT key,value FROM fact_daily WHERE ( ds='1' and hr='4') and key = '495'
 PREHOOK: type: QUERY
 POSTHOOK: query: -- pruner only pick up default directory
 -- explain plan shows which directory selected: Truncated Path -> Alias
-explain extended SELECT x,y FROM fact_daily WHERE ( ds='1' and hr='4') and x = 495
+explain extended SELECT key,value FROM fact_daily WHERE ( ds='1' and hr='4') and key = '495'
 POSTHOOK: type: QUERY
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME fact_daily))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL x)) (TOK_SELEXPR (TOK_TABLE_OR_COL y))) (TOK_WHERE (and (and (= (TOK_TABLE_OR_COL ds) '1') (= (TOK_TABLE_OR_COL hr) '4')) (= (TOK_TABLE_OR_COL x) 495)))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME fact_daily))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (and (and (= (TOK_TABLE_OR_COL ds) '1') (= (TOK_TABLE_OR_COL hr) '4')) (= (TOK_TABLE_OR_COL key) '495')))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -640,13 +508,13 @@ STAGE PLANS:
             Filter Operator
               isSamplingPred: false
               predicate:
-                  expr: (x = 495)
+                  expr: (key = '495')
                   type: boolean
               Select Operator
                 expressions:
-                      expr: x
-                      type: int
-                      expr: y
+                      expr: key
+                      type: string
+                      expr: value
                       type: string
                 outputColumnNames: _col0, _col1
                 File Output Operator
@@ -660,7 +528,7 @@ STAGE PLANS:
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
                         columns _col0,_col1
-                        columns.types int:string
+                        columns.types string:string
                         escape.delim \
                         serialization.format 1
                   TotalFiles: 1
@@ -680,19 +548,19 @@ STAGE PLANS:
               hr 4
             properties:
               bucket_count -1
-              columns x,y
-              columns.types int:string
+              columns key,value
+              columns.types string:string
 #### A masked pattern was here ####
               name default.fact_daily
               numFiles 3
-              numPartitions 3
-              numRows 10
+              numPartitions 1
+              numRows 500
               partition_columns ds/hr
-              rawDataSize 110
-              serialization.ddl struct fact_daily { i32 x, string y}
+              rawDataSize 5312
+              serialization.ddl struct fact_daily { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 120
+              totalSize 5812
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
@@ -700,19 +568,19 @@ STAGE PLANS:
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
-                columns x,y
-                columns.types int:string
+                columns key,value
+                columns.types string:string
 #### A masked pattern was here ####
                 name default.fact_daily
                 numFiles 3
-                numPartitions 3
-                numRows 10
+                numPartitions 1
+                numRows 500
                 partition_columns ds/hr
-                rawDataSize 110
-                serialization.ddl struct fact_daily { i32 x, string y}
+                rawDataSize 5312
+                serialization.ddl struct fact_daily { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 120
+                totalSize 5812
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.fact_daily
@@ -726,21 +594,17 @@ STAGE PLANS:
 
 
 PREHOOK: query: -- List Bucketing Query
-SELECT x,y FROM fact_daily WHERE ( ds='1' and hr='4') and x = 369
+SELECT key,value FROM fact_daily WHERE ( ds='1' and hr='4') and key = '369'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@fact_daily@ds=1/hr=4
 #### A masked pattern was here ####
 POSTHOOK: query: -- List Bucketing Query
-SELECT x,y FROM fact_daily WHERE ( ds='1' and hr='4') and x = 369
+SELECT key,value FROM fact_daily WHERE ( ds='1' and hr='4') and key = '369'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@fact_daily@ds=1/hr=4
 #### A masked pattern was here ####
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 369	val_369
 369	val_369
 369	val_369

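The multiskew_2 baseline that follows exercises the complementary case: the predicate constrains only one of the two skewed columns, and the test's own comment notes that the pruner then picks up only the default directory rather than a skewed-value subdirectory. A representative query from the updated test, lightly reformatted:

    -- only value is constrained, so no complete (key, value) skewed tuple matches
    explain extended
    select key, value from fact_daily
    where ds='1' and hr='4' and value='val_484';
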
Modified: hive/trunk/ql/src/test/results/clientpositive/list_bucket_query_multiskew_2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/list_bucket_query_multiskew_2.q.out?rev=1417374&r1=1417373&r2=1417374&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/list_bucket_query_multiskew_2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/list_bucket_query_multiskew_2.q.out Wed Dec  5 11:59:15 2012
@@ -10,17 +10,11 @@ PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_
 -- 1. pruner only pick up right directory
 -- 2. query result is right
 
-
--- create 1 table: fact_daily
--- 1. create a few partitions
--- 2. dfs move partition according to list bucketing structure (simulate DML) 
---    $/fact_daily/ds=1/hr=4/x=../y=..
---    notes: waste all partitions except ds=1 and hr=4 for list bucketing query test
--- 3. alter it to skewed table and set up location map
--- 4. list bucketing query
--- fact_daily (ds=1 and hr=4) will be used for list bucketing query	
-CREATE TABLE fact_daily(x int, y STRING) PARTITIONED BY (ds STRING, hr STRING)	
-#### A masked pattern was here ####
+-- create a skewed table
+create table fact_daily (key String, value String) 
+partitioned by (ds String, hr String) 
+skewed by (key, value) on (('484','val_484'),('238','val_238')) 
+stored as DIRECTORIES
 PREHOOK: type: CREATETABLE
 POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)	
 
@@ -34,128 +28,35 @@ POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR
 -- 1. pruner only pick up right directory
 -- 2. query result is right
 
-
--- create 1 table: fact_daily
--- 1. create a few partitions
--- 2. dfs move partition according to list bucketing structure (simulate DML) 
---    $/fact_daily/ds=1/hr=4/x=../y=..
---    notes: waste all partitions except ds=1 and hr=4 for list bucketing query test
--- 3. alter it to skewed table and set up location map
--- 4. list bucketing query
--- fact_daily (ds=1 and hr=4) will be used for list bucketing query	
-CREATE TABLE fact_daily(x int, y STRING) PARTITIONED BY (ds STRING, hr STRING)	
-#### A masked pattern was here ####
+-- create a skewed table
+create table fact_daily (key String, value String) 
+partitioned by (ds String, hr String) 
+skewed by (key, value) on (('484','val_484'),('238','val_238')) 
+stored as DIRECTORIES
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@fact_daily
-PREHOOK: query: -- create /fact_daily/ds=1/hr=1 directory	
-INSERT OVERWRITE TABLE fact_daily PARTITION (ds='1', hr='1')	
-SELECT key, value FROM src WHERE key=484
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@fact_daily@ds=1/hr=1
-POSTHOOK: query: -- create /fact_daily/ds=1/hr=1 directory	
-INSERT OVERWRITE TABLE fact_daily PARTITION (ds='1', hr='1')	
-SELECT key, value FROM src WHERE key=484
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@fact_daily@ds=1/hr=1
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- create /fact_daily/ds=1/hr=2 directory	
-INSERT OVERWRITE TABLE fact_daily PARTITION (ds='1', hr='2')	
-SELECT key, value FROM src WHERE key=369 or key=406
+PREHOOK: query: insert overwrite table fact_daily partition (ds = '1', hr = '4')
+select key, value from src
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: default@fact_daily@ds=1/hr=2
-POSTHOOK: query: -- create /fact_daily/ds=1/hr=2 directory	
-INSERT OVERWRITE TABLE fact_daily PARTITION (ds='1', hr='2')	
-SELECT key, value FROM src WHERE key=369 or key=406
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@fact_daily@ds=1/hr=2
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- create /fact_daily/ds=1/hr=3 directory	
-INSERT OVERWRITE TABLE fact_daily PARTITION (ds='1', hr='3')	
-SELECT key, value FROM src WHERE key=238
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@fact_daily@ds=1/hr=3
-POSTHOOK: query: -- create /fact_daily/ds=1/hr=3 directory	
-INSERT OVERWRITE TABLE fact_daily PARTITION (ds='1', hr='3')	
-SELECT key, value FROM src WHERE key=238
+PREHOOK: Output: default@fact_daily@ds=1/hr=4
+POSTHOOK: query: insert overwrite table fact_daily partition (ds = '1', hr = '4')
+select key, value from src
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: default@fact_daily@ds=1/hr=3
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-#### A masked pattern was here ####
-PREHOOK: query: -- switch fact_daily to skewed table and point its location to /fact_daily/ds=1
-alter table fact_daily skewed by (x,y) on ((484,'val_484'),(238,'val_238'))
-PREHOOK: type: ALTERTABLE_SKEWED
-PREHOOK: Input: default@fact_daily
-PREHOOK: Output: default@fact_daily
-POSTHOOK: query: -- switch fact_daily to skewed table and point its location to /fact_daily/ds=1
-alter table fact_daily skewed by (x,y) on ((484,'val_484'),(238,'val_238'))
-POSTHOOK: type: ALTERTABLE_SKEWED
-POSTHOOK: Input: default@fact_daily
-POSTHOOK: Output: default@fact_daily
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: ALTER TABLE fact_daily ADD PARTITION (ds='1', hr='4')
-PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: default@fact_daily
-POSTHOOK: query: ALTER TABLE fact_daily ADD PARTITION (ds='1', hr='4')
-POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: default@fact_daily
-POSTHOOK: Output: default@fact_daily@ds=1/hr=4
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- set List Bucketing location map
-#### A masked pattern was here ####
-PREHOOK: type: ALTERTBLPART_SKEWED_LOCATION
-PREHOOK: Input: default@fact_daily
-PREHOOK: Output: default@fact_daily@ds=1/hr=4
-POSTHOOK: query: -- set List Bucketing location map
-#### A masked pattern was here ####
-POSTHOOK: type: ALTERTBLPART_SKEWED_LOCATION
-POSTHOOK: Input: default@fact_daily
-POSTHOOK: Input: default@fact_daily@ds=1/hr=4
 POSTHOOK: Output: default@fact_daily@ds=1/hr=4
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: describe formatted fact_daily PARTITION (ds = '1', hr='4')
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: describe formatted fact_daily PARTITION (ds = '1', hr='4')
 POSTHOOK: type: DESCTABLE
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 # col_name            	data_type           	comment             
 	 	 
-x                   	int                 	None                
-y                   	string              	None                
+key                 	string              	None                
+value               	string              	None                
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
@@ -171,6 +72,10 @@ Table:              	fact_daily         
 Protect Mode:       	None                	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	numFiles            	3                   
+	numRows             	500                 
+	rawDataSize         	5312                
+	totalSize           	5812                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -181,51 +86,36 @@ Compressed:         	No                 
 Num Buckets:        	-1                  	 
 Bucket Columns:     	[]                  	 
 Sort Columns:       	[]                  	 
-Skewed Columns:     	[x, y]              	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key, value]        	 
 Skewed Values:      	[[484, val_484], [238, val_238]]	 
 #### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[484, val_484]=/fact_daily/ds=1/hr=4/key=484/value=val_484, [238, val_238]=/fact_daily/ds=1/hr=4/key=238/value=val_238}	 
 Storage Desc Params:	 	 
 	serialization.format	1                   
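The "Stored As SubDirectories: Yes" and "Skewed Value to Truncated Path" entries above mean the ds=1/hr=4 partition is laid out as one subdirectory per listed skewed value pair plus a default directory for all remaining rows. The layout can be checked from the Hive CLI roughly as below; the warehouse root is a placeholder, since the real locations are masked in this output:

dfs -lsr /user/hive/warehouse/fact_daily/ds=1/hr=4;
-- expected subdirectories (root path hypothetical; masked above):
--   .../ds=1/hr=4/key=484/value=val_484
--   .../ds=1/hr=4/key=238/value=val_238
--   .../ds=1/hr=4/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME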
-PREHOOK: query: SELECT * FROM fact_daily WHERE ds='1' and hr='4'
+PREHOOK: query: SELECT count(1) FROM fact_daily WHERE ds='1' and hr='4'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@fact_daily@ds=1/hr=4
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM fact_daily WHERE ds='1' and hr='4'
+POSTHOOK: query: SELECT count(1) FROM fact_daily WHERE ds='1' and hr='4'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@fact_daily@ds=1/hr=4
 #### A masked pattern was here ####
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-369	val_369	1	4
-406	val_406	1	4
-369	val_369	1	4
-369	val_369	1	4
-406	val_406	1	4
-406	val_406	1	4
-406	val_406	1	4
-238	val_238	1	4
-238	val_238	1	4
-484	val_484	1	4
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+500
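The full-partition count of 500 matches numRows in the partition parameters above and is answered by reading the whole partition, i.e. all three files across the skewed and default subdirectories, since there is no predicate on the skewed columns to prune on. Reading such subdirectory layouts normally also relies on the usual list-bucketing q-file preamble; the settings below are assumptions here, as the .q side is not part of this diff:

set hive.mapred.supports.subdirectories=true;
set mapred.input.dir.recursive=true;
set hive.optimize.listbucketing=true;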
 PREHOOK: query: -- pruner only pick up default directory
 -- explain plan shows which directory selected: Truncated Path -> Alias
-explain extended SELECT x,y FROM fact_daily WHERE ds='1' and hr='4' and y= 'val_484'
+explain extended SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and value= 'val_484'
 PREHOOK: type: QUERY
 POSTHOOK: query: -- pruner only pick up default directory
 -- explain plan shows which directory selected: Truncated Path -> Alias
-explain extended SELECT x,y FROM fact_daily WHERE ds='1' and hr='4' and y= 'val_484'
+explain extended SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and value= 'val_484'
 POSTHOOK: type: QUERY
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME fact_daily))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL x)) (TOK_SELEXPR (TOK_TABLE_OR_COL y))) (TOK_WHERE (and (and (= (TOK_TABLE_OR_COL ds) '1') (= (TOK_TABLE_OR_COL hr) '4')) (= (TOK_TABLE_OR_COL y) 'val_484')))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME fact_daily))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (and (and (= (TOK_TABLE_OR_COL ds) '1') (= (TOK_TABLE_OR_COL hr) '4')) (= (TOK_TABLE_OR_COL value) 'val_484')))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -242,13 +132,13 @@ STAGE PLANS:
             Filter Operator
               isSamplingPred: false
               predicate:
-                  expr: (y = 'val_484')
+                  expr: (value = 'val_484')
                   type: boolean
               Select Operator
                 expressions:
-                      expr: x
-                      type: int
-                      expr: y
+                      expr: key
+                      type: string
+                      expr: value
                       type: string
                 outputColumnNames: _col0, _col1
                 File Output Operator
@@ -262,7 +152,7 @@ STAGE PLANS:
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
                         columns _col0,_col1
-                        columns.types int:string
+                        columns.types string:string
                         escape.delim \
                         serialization.format 1
                   TotalFiles: 1
@@ -282,19 +172,19 @@ STAGE PLANS:
               hr 4
             properties:
               bucket_count -1
-              columns x,y
-              columns.types int:string
+              columns key,value
+              columns.types string:string
 #### A masked pattern was here ####
               name default.fact_daily
               numFiles 3
-              numPartitions 3
-              numRows 10
+              numPartitions 1
+              numRows 500
               partition_columns ds/hr
-              rawDataSize 110
-              serialization.ddl struct fact_daily { i32 x, string y}
+              rawDataSize 5312
+              serialization.ddl struct fact_daily { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 120
+              totalSize 5812
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
@@ -302,26 +192,26 @@ STAGE PLANS:
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
-                columns x,y
-                columns.types int:string
+                columns key,value
+                columns.types string:string
 #### A masked pattern was here ####
                 name default.fact_daily
                 numFiles 3
-                numPartitions 3
-                numRows 10
+                numPartitions 1
+                numRows 500
                 partition_columns ds/hr
-                rawDataSize 110
-                serialization.ddl struct fact_daily { i32 x, string y}
+                rawDataSize 5312
+                serialization.ddl struct fact_daily { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 120
+                totalSize 5812
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.fact_daily
             name: default.fact_daily
 #### A masked pattern was here ####
           Partition
-            base file name: y=val_484
+            base file name: value=val_484
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             partition values:
@@ -329,19 +219,19 @@ STAGE PLANS:
               hr 4
             properties:
               bucket_count -1
-              columns x,y
-              columns.types int:string
+              columns key,value
+              columns.types string:string
 #### A masked pattern was here ####
               name default.fact_daily
               numFiles 3
-              numPartitions 3
-              numRows 10
+              numPartitions 1
+              numRows 500
               partition_columns ds/hr
-              rawDataSize 110
-              serialization.ddl struct fact_daily { i32 x, string y}
+              rawDataSize 5312
+              serialization.ddl struct fact_daily { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 120
+              totalSize 5812
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
@@ -349,26 +239,26 @@ STAGE PLANS:
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
-                columns x,y
-                columns.types int:string
+                columns key,value
+                columns.types string:string
 #### A masked pattern was here ####
                 name default.fact_daily
                 numFiles 3
-                numPartitions 3
-                numRows 10
+                numPartitions 1
+                numRows 500
                 partition_columns ds/hr
-                rawDataSize 110
-                serialization.ddl struct fact_daily { i32 x, string y}
+                rawDataSize 5312
+                serialization.ddl struct fact_daily { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 120
+                totalSize 5812
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.fact_daily
             name: default.fact_daily
       Truncated Path -> Alias:
         /fact_daily/ds=1/hr=4/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME [fact_daily]
-        /fact_daily/ds=1/hr=4/x=484/y=val_484 [fact_daily]
+        /fact_daily/ds=1/hr=4/key=484/value=val_484 [fact_daily]
 
   Stage: Stage-0
     Fetch Operator
@@ -376,38 +266,30 @@ STAGE PLANS:
 
 
 PREHOOK: query: -- List Bucketing Query
-SELECT x,y FROM fact_daily WHERE ds='1' and hr='4' and y= 'val_484'
+SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and value= 'val_484'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@fact_daily@ds=1/hr=4
 #### A masked pattern was here ####
 POSTHOOK: query: -- List Bucketing Query
-SELECT x,y FROM fact_daily WHERE ds='1' and hr='4' and y= 'val_484'
+SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and value= 'val_484'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@fact_daily@ds=1/hr=4
 #### A masked pattern was here ####
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 484	val_484
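As the "Truncated Path -> Alias" section of the plan above shows, this result is produced from just the default directory and key=484/value=val_484. The same check applies to the other skewed value; by analogy with the plan above one would expect symmetric pruning (a sketch, not part of this test):

explain extended SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and value= 'val_238';
-- "Truncated Path -> Alias" should list the default directory plus
-- /fact_daily/ds=1/hr=4/key=238/value=val_238, mirroring the val_484 case above.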
 PREHOOK: query: -- pruner only pick up default directory
 -- explain plan shows which directory selected: Truncated Path -> Alias
-explain extended SELECT x FROM fact_daily WHERE ds='1' and hr='4' and x= 406
+explain extended SELECT key FROM fact_daily WHERE ds='1' and hr='4' and key= '406'
 PREHOOK: type: QUERY
 POSTHOOK: query: -- pruner only pick up default directory
 -- explain plan shows which directory selected: Truncated Path -> Alias
-explain extended SELECT x FROM fact_daily WHERE ds='1' and hr='4' and x= 406
+explain extended SELECT key FROM fact_daily WHERE ds='1' and hr='4' and key= '406'
 POSTHOOK: type: QUERY
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME fact_daily))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL x))) (TOK_WHERE (and (and (= (TOK_TABLE_OR_COL ds) '1') (= (TOK_TABLE_OR_COL hr) '4')) (= (TOK_TABLE_OR_COL x) 406)))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME fact_daily))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key))) (TOK_WHERE (and (and (= (TOK_TABLE_OR_COL ds) '1') (= (TOK_TABLE_OR_COL hr) '4')) (= (TOK_TABLE_OR_COL key) '406')))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -424,12 +306,12 @@ STAGE PLANS:
             Filter Operator
               isSamplingPred: false
               predicate:
-                  expr: (x = 406)
+                  expr: (key = '406')
                   type: boolean
               Select Operator
                 expressions:
-                      expr: x
-                      type: int
+                      expr: key
+                      type: string
                 outputColumnNames: _col0
                 File Output Operator
                   compressed: false
@@ -442,7 +324,7 @@ STAGE PLANS:
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
                         columns _col0
-                        columns.types int
+                        columns.types string
                         escape.delim \
                         serialization.format 1
                   TotalFiles: 1
@@ -462,19 +344,19 @@ STAGE PLANS:
               hr 4
             properties:
               bucket_count -1
-              columns x,y
-              columns.types int:string
+              columns key,value
+              columns.types string:string
 #### A masked pattern was here ####
               name default.fact_daily
               numFiles 3
-              numPartitions 3
-              numRows 10
+              numPartitions 1
+              numRows 500
               partition_columns ds/hr
-              rawDataSize 110
-              serialization.ddl struct fact_daily { i32 x, string y}
+              rawDataSize 5312
+              serialization.ddl struct fact_daily { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 120
+              totalSize 5812
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
@@ -482,19 +364,19 @@ STAGE PLANS:
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
-                columns x,y
-                columns.types int:string
+                columns key,value
+                columns.types string:string
 #### A masked pattern was here ####
                 name default.fact_daily
                 numFiles 3
-                numPartitions 3
-                numRows 10
+                numPartitions 1
+                numRows 500
                 partition_columns ds/hr
-                rawDataSize 110
-                serialization.ddl struct fact_daily { i32 x, string y}
+                rawDataSize 5312
+                serialization.ddl struct fact_daily { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 120
+                totalSize 5812
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.fact_daily
@@ -508,41 +390,33 @@ STAGE PLANS:
 
 
 PREHOOK: query: -- List Bucketing Query
-SELECT x,y FROM fact_daily WHERE ds='1' and hr='4' and x= 406
+SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and key= '406'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@fact_daily@ds=1/hr=4
 #### A masked pattern was here ####
 POSTHOOK: query: -- List Bucketing Query
-SELECT x,y FROM fact_daily WHERE ds='1' and hr='4' and x= 406
+SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and key= '406'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@fact_daily@ds=1/hr=4
 #### A masked pattern was here ####
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 406	val_406
 406	val_406
 406	val_406
 406	val_406
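Since '406' is not one of the listed skewed values, the comment on the query above notes that the pruner keeps only the default directory (the HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME path). Any other key outside the skew list would be expected to resolve the same way, for example (a sketch, not part of this test):

SELECT key FROM fact_daily WHERE ds='1' and hr='4' and key= '369';
-- only the default directory needs to be scanned; the key=484/value=val_484
-- and key=238/value=val_238 subdirectories hold no rows with key '369'.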
 PREHOOK: query: -- pruner only pick up skewed-value directory
 -- explain plan shows which directory selected: Truncated Path -> Alias
-explain extended SELECT x,y FROM fact_daily WHERE ds='1' and hr='4' and ( (x=484 and y ='val_484')  or (x=238 and y= 'val_238'))
+explain extended SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and ( (key='484' and value ='val_484')  or (key='238' and value= 'val_238'))
 PREHOOK: type: QUERY
 POSTHOOK: query: -- pruner only pick up skewed-value directory
 -- explain plan shows which directory selected: Truncated Path -> Alias
-explain extended SELECT x,y FROM fact_daily WHERE ds='1' and hr='4' and ( (x=484 and y ='val_484')  or (x=238 and y= 'val_238'))
+explain extended SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and ( (key='484' and value ='val_484')  or (key='238' and value= 'val_238'))
 POSTHOOK: type: QUERY
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME fact_daily))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL x)) (TOK_SELEXPR (TOK_TABLE_OR_COL y))) (TOK_WHERE (and (and (= (TOK_TABLE_OR_COL ds) '1') (= (TOK_TABLE_OR_COL hr) '4')) (or (and (= (TOK_TABLE_OR_COL x) 484) (= (TOK_TABLE_OR_COL y) 'val_484')) (and (= (TOK_TABLE_OR_COL x) 238) (= (TOK_TABLE_OR_COL y) 'val_238')))))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME fact_daily))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (and (and (= (TOK_TABLE_OR_COL ds) '1') (= (TOK_TABLE_OR_COL hr) '4')) (or (and (= (TOK_TABLE_OR_COL key) '484') (= (TOK_TABLE_OR_COL value) 'val_484')) (and (= (TOK_TABLE_OR_COL key) '238') (= (TOK_TABLE_OR_COL value) 'val_238')))))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -559,13 +433,13 @@ STAGE PLANS:
             Filter Operator
               isSamplingPred: false
               predicate:
-                  expr: (((x = 484) and (y = 'val_484')) or ((x = 238) and (y = 'val_238')))
+                  expr: (((key = '484') and (value = 'val_484')) or ((key = '238') and (value = 'val_238')))
                   type: boolean
               Select Operator
                 expressions:
-                      expr: x
-                      type: int
-                      expr: y
+                      expr: key
+                      type: string
+                      expr: value
                       type: string
                 outputColumnNames: _col0, _col1
                 File Output Operator
@@ -579,7 +453,7 @@ STAGE PLANS:
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
                         columns _col0,_col1
-                        columns.types int:string
+                        columns.types string:string
                         escape.delim \
                         serialization.format 1
                   TotalFiles: 1
@@ -591,7 +465,7 @@ STAGE PLANS:
       Path -> Partition:
 #### A masked pattern was here ####
           Partition
-            base file name: y=val_238
+            base file name: value=val_238
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             partition values:
@@ -599,19 +473,19 @@ STAGE PLANS:
               hr 4
             properties:
               bucket_count -1
-              columns x,y
-              columns.types int:string
+              columns key,value
+              columns.types string:string
 #### A masked pattern was here ####
               name default.fact_daily
               numFiles 3
-              numPartitions 3
-              numRows 10
+              numPartitions 1
+              numRows 500
               partition_columns ds/hr
-              rawDataSize 110
-              serialization.ddl struct fact_daily { i32 x, string y}
+              rawDataSize 5312
+              serialization.ddl struct fact_daily { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 120
+              totalSize 5812
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
@@ -619,26 +493,26 @@ STAGE PLANS:
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
-                columns x,y
-                columns.types int:string
+                columns key,value
+                columns.types string:string
 #### A masked pattern was here ####
                 name default.fact_daily
                 numFiles 3
-                numPartitions 3
-                numRows 10
+                numPartitions 1
+                numRows 500
                 partition_columns ds/hr
-                rawDataSize 110
-                serialization.ddl struct fact_daily { i32 x, string y}
+                rawDataSize 5312
+                serialization.ddl struct fact_daily { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 120
+                totalSize 5812
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.fact_daily
             name: default.fact_daily
 #### A masked pattern was here ####
           Partition
-            base file name: y=val_484
+            base file name: value=val_484
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             partition values:
@@ -646,19 +520,19 @@ STAGE PLANS:
               hr 4
             properties:
               bucket_count -1
-              columns x,y
-              columns.types int:string
+              columns key,value
+              columns.types string:string
 #### A masked pattern was here ####
               name default.fact_daily
               numFiles 3
-              numPartitions 3
-              numRows 10
+              numPartitions 1
+              numRows 500
               partition_columns ds/hr
-              rawDataSize 110
-              serialization.ddl struct fact_daily { i32 x, string y}
+              rawDataSize 5312
+              serialization.ddl struct fact_daily { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 120
+              totalSize 5812
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
@@ -666,26 +540,26 @@ STAGE PLANS:
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
-                columns x,y
-                columns.types int:string
+                columns key,value
+                columns.types string:string
 #### A masked pattern was here ####
                 name default.fact_daily
                 numFiles 3
-                numPartitions 3
-                numRows 10
+                numPartitions 1
+                numRows 500
                 partition_columns ds/hr
-                rawDataSize 110
-                serialization.ddl struct fact_daily { i32 x, string y}
+                rawDataSize 5312
+                serialization.ddl struct fact_daily { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 120
+                totalSize 5812
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.fact_daily
             name: default.fact_daily
       Truncated Path -> Alias:
-        /fact_daily/ds=1/hr=4/x=238/y=val_238 [fact_daily]
-        /fact_daily/ds=1/hr=4/x=484/y=val_484 [fact_daily]
+        /fact_daily/ds=1/hr=4/key=238/value=val_238 [fact_daily]
+        /fact_daily/ds=1/hr=4/key=484/value=val_484 [fact_daily]
 
   Stage: Stage-0
     Fetch Operator
@@ -693,21 +567,17 @@ STAGE PLANS:
 
 
 PREHOOK: query: -- List Bucketing Query
-SELECT x,y FROM fact_daily WHERE ds='1' and hr='4' and ( (x=484 and y ='val_484')  or (x=238 and y= 'val_238'))
+SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and ( (key='484' and value ='val_484')  or (key='238' and value= 'val_238'))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@fact_daily@ds=1/hr=4
 #### A masked pattern was here ####
 POSTHOOK: query: -- List Bucketing Query
-SELECT x,y FROM fact_daily WHERE ds='1' and hr='4' and ( (x=484 and y ='val_484')  or (x=238 and y= 'val_238'))
+SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and ( (key='484' and value ='val_484')  or (key='238' and value= 'val_238'))
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@fact_daily@ds=1/hr=4
 #### A masked pattern was here ####
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 238	val_238
 238	val_238
 484	val_484
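With both skewed value pairs in the predicate, "Truncated Path -> Alias" above lists exactly the two value-specific subdirectories and leaves out the default directory. The pruning is decided from the filter alone, so the same two-directory scan would be expected if the filtered rows fed further work, for example (a sketch, not part of this test):

SELECT key, count(*) FROM fact_daily
WHERE ds='1' and hr='4' and ( (key='484' and value ='val_484')  or (key='238' and value= 'val_238'))
GROUP BY key;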
@@ -721,9 +591,5 @@ drop table fact_daily
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Input: default@fact_daily
 POSTHOOK: Output: default@fact_daily
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=1).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=2).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).x EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=3).y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: fact_daily PARTITION(ds=1,hr=4).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]