Posted to commits@hive.apache.org by ha...@apache.org on 2014/04/28 17:40:32 UTC

svn commit: r1590681 [3/3] - in /hive/trunk: common/src/java/org/apache/hadoop/hive/common/ metastore/src/java/org/apache/hadoop/hive/metastore/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientn...

Modified: hive/trunk/ql/src/test/results/clientpositive/groupby_sort_1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/groupby_sort_1.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
Files hive/trunk/ql/src/test/results/clientpositive/groupby_sort_1.q.out (original) and hive/trunk/ql/src/test/results/clientpositive/groupby_sort_1.q.out Mon Apr 28 15:40:31 2014 differ

Modified: hive/trunk/ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
Files hive/trunk/ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out (original) and hive/trunk/ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out Mon Apr 28 15:40:31 2014 differ

Modified: hive/trunk/ql/src/test/results/clientpositive/infer_bucket_sort_list_bucket.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/infer_bucket_sort_list_bucket.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/infer_bucket_sort_list_bucket.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/infer_bucket_sort_list_bucket.q.out Mon Apr 28 15:40:31 2014
@@ -150,7 +150,7 @@ Partition Parameters:	 	 
 	numFiles            	1                   
 	numRows             	309                 
 	rawDataSize         	1482                
-	totalSize           	136                 
+	totalSize           	1791                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Modified: hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_6.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_6.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
Files hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_6.q.out (original) and hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_6.q.out Mon Apr 28 15:40:31 2014 differ

Modified: hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_7.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_7.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
Files hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_7.q.out (original) and hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_7.q.out Mon Apr 28 15:40:31 2014 differ

Modified: hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_8.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_8.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
Files hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_8.q.out (original) and hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_8.q.out Mon Apr 28 15:40:31 2014 differ

Modified: hive/trunk/ql/src/test/results/clientpositive/mapjoin_test_outer.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/mapjoin_test_outer.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/mapjoin_test_outer.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/mapjoin_test_outer.q.out Mon Apr 28 15:40:31 2014
@@ -1135,7 +1135,7 @@ STAGE PLANS:
         src1 
           Fetch Operator
             limit: -1
-        src2 
+        src3 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
@@ -1152,10 +1152,10 @@ STAGE PLANS:
                 0 key (type: string)
                 1 key (type: string)
                 2 key (type: string)
-        src2 
+        src3 
           TableScan
-            alias: src2
-            Statistics: Num rows: 1 Data size: 13 Basic stats: COMPLETE Column stats: NONE
+            alias: src3
+            Statistics: Num rows: 9 Data size: 40 Basic stats: COMPLETE Column stats: NONE
             HashTable Sink Operator
               condition expressions:
                 0 {key} {value}
@@ -1170,8 +1170,8 @@ STAGE PLANS:
     Map Reduce
       Map Operator Tree:
           TableScan
-            alias: src3
-            Statistics: Num rows: 9 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+            alias: src2
+            Statistics: Num rows: 1 Data size: 13 Basic stats: COMPLETE Column stats: NONE
             Map Join Operator
               condition map:
                    Right Outer Join0 to 1

Modified: hive/trunk/ql/src/test/results/clientpositive/nullformatCTAS.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/nullformatCTAS.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/nullformatCTAS.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/nullformatCTAS.q.out Mon Apr 28 15:40:31 2014
@@ -168,10 +168,10 @@ LOCATION
 #### A masked pattern was here ####
 TBLPROPERTIES (
   'numFiles'='1', 
-  'COLUMN_STATS_ACCURATE'='true', 
 #### A masked pattern was here ####
-  'numRows'='10', 
+  'COLUMN_STATS_ACCURATE'='true', 
   'totalSize'='80', 
+  'numRows'='10', 
   'rawDataSize'='70')
 1.01
 1.01

Modified: hive/trunk/ql/src/test/results/clientpositive/nullgroup3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/nullgroup3.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/nullgroup3.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/nullgroup3.q.out Mon Apr 28 15:40:31 2014
@@ -127,9 +127,9 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: tstparttbl2
-            Statistics: Num rows: 0 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+            Statistics: Num rows: 0 Data size: 16 Basic stats: PARTIAL Column stats: COMPLETE
             Select Operator
-              Statistics: Num rows: 0 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+              Statistics: Num rows: 0 Data size: 16 Basic stats: PARTIAL Column stats: COMPLETE
               Group By Operator
                 aggregations: count(1)
                 mode: hash
@@ -319,9 +319,9 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: tstparttbl2
-            Statistics: Num rows: 0 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+            Statistics: Num rows: 0 Data size: 16 Basic stats: PARTIAL Column stats: COMPLETE
             Select Operator
-              Statistics: Num rows: 0 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+              Statistics: Num rows: 0 Data size: 16 Basic stats: PARTIAL Column stats: COMPLETE
               Group By Operator
                 aggregations: count(1)
                 mode: hash

Modified: hive/trunk/ql/src/test/results/clientpositive/orc_createas1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/orc_createas1.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/orc_createas1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/orc_createas1.q.out Mon Apr 28 15:40:31 2014
@@ -178,19 +178,19 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orc_createas1b
-            Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 500 Data size: 88318 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: key (type: string), value (type: string)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 500 Data size: 88318 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: string)
                 sort order: +
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 500 Data size: 88318 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col0 (type: string), _col1 (type: string)
       Reduce Operator Tree:
         Extract
-          Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 500 Data size: 88318 Basic stats: COMPLETE Column stats: NONE
           Limit
             Number of rows: 5
             Statistics: Num rows: 5 Data size: 880 Basic stats: COMPLETE Column stats: NONE

Modified: hive/trunk/ql/src/test/results/clientpositive/ppd_join4.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/ppd_join4.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/ppd_join4.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/ppd_join4.q.out Mon Apr 28 15:40:31 2014
@@ -55,7 +55,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: test_tbl
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 0 Data size: 8 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: ((name = 'c') and (id = 'a')) (type: boolean)
               Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
@@ -83,7 +83,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: t3
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 0 Data size: 8 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (id = 'a') (type: boolean)
               Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE

Modified: hive/trunk/ql/src/test/results/clientpositive/select_dummy_source.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/select_dummy_source.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/select_dummy_source.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/select_dummy_source.q.out Mon Apr 28 15:40:31 2014
@@ -15,14 +15,14 @@ STAGE PLANS:
           TableScan
             alias: _dummy_table
             Row Limit Per Split: 1
-            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
             Select Operator
               expressions: 'a' (type: string), 100 (type: int)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+              Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
               File Output Operator
                 compressed: false
-                Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+                Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -60,14 +60,14 @@ STAGE PLANS:
           TableScan
             alias: _dummy_table
             Row Limit Per Split: 1
-            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
             Select Operator
               expressions: (1 + 1) (type: int)
               outputColumnNames: _col0
-              Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+              Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
               File Output Operator
                 compressed: false
-                Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+                Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -105,17 +105,17 @@ STAGE PLANS:
           TableScan
             alias: _dummy_table
             Row Limit Per Split: 1
-            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
             Select Operator
               expressions: array('a','b') (type: array<string>)
               outputColumnNames: _col0
-              Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+              Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
               UDTF Operator
-                Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+                Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
                 function name: explode
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+                  Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -152,11 +152,11 @@ STAGE PLANS:
         TableScan
           alias: _dummy_table
           Row Limit Per Split: 1
-          Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+          Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
           Select Operator
             expressions: 'a' (type: string), 100 (type: int)
             outputColumnNames: _col0, _col1
-            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
             ListSink
 
 PREHOOK: query: select 'a', 100
@@ -185,11 +185,11 @@ STAGE PLANS:
         TableScan
           alias: _dummy_table
           Row Limit Per Split: 1
-          Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+          Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
           Select Operator
             expressions: (1 + 1) (type: int)
             outputColumnNames: _col0
-            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
             ListSink
 
 PREHOOK: query: select 1 + 1
@@ -218,17 +218,17 @@ STAGE PLANS:
           TableScan
             alias: _dummy_table
             Row Limit Per Split: 1
-            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
             Select Operator
               expressions: array('a','b') (type: array<string>)
               outputColumnNames: _col0
-              Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+              Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
               UDTF Operator
-                Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+                Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
                 function name: explode
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+                  Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -267,14 +267,14 @@ STAGE PLANS:
           TableScan
             alias: _dummy_table
             Row Limit Per Split: 1
-            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
             Select Operator
               expressions: (2 + 3) (type: int), (1 + 2) (type: int)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+              Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
               File Output Operator
                 compressed: false
-                Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+                Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

Modified: hive/trunk/ql/src/test/results/clientpositive/show_create_table_alter.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/show_create_table_alter.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/show_create_table_alter.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/show_create_table_alter.q.out Mon Apr 28 15:40:31 2014
@@ -69,13 +69,12 @@ OUTPUTFORMAT 
 LOCATION
 #### A masked pattern was here ####
 TBLPROPERTIES (
-  'EXTERNAL'='FALSE', 
   'numFiles'='0', 
+  'EXTERNAL'='FALSE', 
 #### A masked pattern was here ####
   'COLUMN_STATS_ACCURATE'='false', 
-#### A masked pattern was here ####
-  'numRows'='-1', 
   'totalSize'='0', 
+  'numRows'='-1', 
   'rawDataSize'='-1')
 PREHOOK: query: -- Alter the table comment, change the EXTERNAL property back and test SHOW CREATE TABLE on the change.
 ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='changed comment', 'EXTERNAL'='TRUE')
@@ -114,9 +113,8 @@ TBLPROPERTIES (
   'numFiles'='0', 
 #### A masked pattern was here ####
   'COLUMN_STATS_ACCURATE'='false', 
-#### A masked pattern was here ####
-  'numRows'='-1', 
   'totalSize'='0', 
+  'numRows'='-1', 
   'rawDataSize'='-1')
 PREHOOK: query: -- Change the 'SORTBUCKETCOLSPREFIX' property and test SHOW CREATE TABLE. The output should not change.
 ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('SORTBUCKETCOLSPREFIX'='FALSE')
@@ -155,9 +153,8 @@ TBLPROPERTIES (
   'numFiles'='0', 
 #### A masked pattern was here ####
   'COLUMN_STATS_ACCURATE'='false', 
-#### A masked pattern was here ####
-  'numRows'='-1', 
   'totalSize'='0', 
+  'numRows'='-1', 
   'rawDataSize'='-1')
 PREHOOK: query: -- Alter the storage handler of the table, and test SHOW CREATE TABLE.
 ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('storage_handler'='org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler')
@@ -196,9 +193,8 @@ TBLPROPERTIES (
   'numFiles'='0', 
 #### A masked pattern was here ####
   'COLUMN_STATS_ACCURATE'='false', 
-#### A masked pattern was here ####
-  'numRows'='-1', 
   'totalSize'='0', 
+  'numRows'='-1', 
   'rawDataSize'='-1')
 PREHOOK: query: DROP TABLE tmp_showcrt1
 PREHOOK: type: DROPTABLE

Modified: hive/trunk/ql/src/test/results/clientpositive/show_tblproperties.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/show_tblproperties.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/show_tblproperties.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/show_tblproperties.q.out Mon Apr 28 15:40:31 2014
@@ -33,11 +33,11 @@ POSTHOOK: type: SHOW_TBLPROPERTIES
 numFiles	0
 #### A masked pattern was here ####
 tmp	true
-COLUMN_STATS_ACCURATE	false
 #### A masked pattern was here ####
-bar	bar value
-numRows	-1
+COLUMN_STATS_ACCURATE	false
 totalSize	0
+numRows	-1
+bar	bar value
 rawDataSize	-1
 PREHOOK: query: show tblproperties tmpfoo("bar")
 PREHOOK: type: SHOW_TBLPROPERTIES

Modified: hive/trunk/ql/src/test/results/clientpositive/stats_list_bucket.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/stats_list_bucket.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/stats_list_bucket.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/stats_list_bucket.q.out Mon Apr 28 15:40:31 2014
@@ -166,7 +166,7 @@ Table Parameters:	 	 
 	numFiles            	4                   
 	numRows             	500                 
 	rawDataSize         	4812                
-	totalSize           	408                 
+	totalSize           	5522                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Modified: hive/trunk/ql/src/test/results/clientpositive/stats_partscan_1_23.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/stats_partscan_1_23.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/stats_partscan_1_23.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/stats_partscan_1_23.q.out Mon Apr 28 15:40:31 2014
@@ -86,10 +86,10 @@ Protect Mode:       	None               
 #### A masked pattern was here ####
 Partition Parameters:	 	 
 	COLUMN_STATS_ACCURATE	false               
-	numFiles            	22                  
+	numFiles            	1                   
 	numRows             	-1                  
 	rawDataSize         	-1                  
-	totalSize           	6954                
+	totalSize           	5297                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -185,10 +185,10 @@ Protect Mode:       	None               
 #### A masked pattern was here ####
 Partition Parameters:	 	 
 	COLUMN_STATS_ACCURATE	true                
-	numFiles            	22                  
+	numFiles            	1                   
 	numRows             	500                 
 	rawDataSize         	4812                
-	totalSize           	6954                
+	totalSize           	5297                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -235,10 +235,10 @@ Protect Mode:       	None               
 #### A masked pattern was here ####
 Partition Parameters:	 	 
 	COLUMN_STATS_ACCURATE	false               
-	numFiles            	22                  
+	numFiles            	1                   
 	numRows             	-1                  
 	rawDataSize         	-1                  
-	totalSize           	6954                
+	totalSize           	5297                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Modified: hive/trunk/ql/src/test/results/clientpositive/symlink_text_input_format.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/symlink_text_input_format.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/symlink_text_input_format.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/symlink_text_input_format.q.out Mon Apr 28 15:40:31 2014
@@ -41,22 +41,22 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: symlink_text_input_format
-            Statistics: Num rows: 0 Data size: 72 Basic stats: PARTIAL Column stats: NONE
+            Statistics: Num rows: 0 Data size: 96 Basic stats: PARTIAL Column stats: NONE
             Select Operator
               expressions: key (type: string), value (type: string)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 0 Data size: 72 Basic stats: PARTIAL Column stats: NONE
+              Statistics: Num rows: 0 Data size: 96 Basic stats: PARTIAL Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: string), _col1 (type: string)
                 sort order: ++
-                Statistics: Num rows: 0 Data size: 72 Basic stats: PARTIAL Column stats: NONE
+                Statistics: Num rows: 0 Data size: 96 Basic stats: PARTIAL Column stats: NONE
                 value expressions: _col0 (type: string), _col1 (type: string)
       Reduce Operator Tree:
         Extract
-          Statistics: Num rows: 0 Data size: 72 Basic stats: PARTIAL Column stats: NONE
+          Statistics: Num rows: 0 Data size: 96 Basic stats: PARTIAL Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 0 Data size: 72 Basic stats: PARTIAL Column stats: NONE
+            Statistics: Num rows: 0 Data size: 96 Basic stats: PARTIAL Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -104,22 +104,22 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: symlink_text_input_format
-            Statistics: Num rows: 0 Data size: 72 Basic stats: PARTIAL Column stats: NONE
+            Statistics: Num rows: 0 Data size: 96 Basic stats: PARTIAL Column stats: NONE
             Select Operator
               expressions: value (type: string)
               outputColumnNames: _col0
-              Statistics: Num rows: 0 Data size: 72 Basic stats: PARTIAL Column stats: NONE
+              Statistics: Num rows: 0 Data size: 96 Basic stats: PARTIAL Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: string)
                 sort order: +
-                Statistics: Num rows: 0 Data size: 72 Basic stats: PARTIAL Column stats: NONE
+                Statistics: Num rows: 0 Data size: 96 Basic stats: PARTIAL Column stats: NONE
                 value expressions: _col0 (type: string)
       Reduce Operator Tree:
         Extract
-          Statistics: Num rows: 0 Data size: 72 Basic stats: PARTIAL Column stats: NONE
+          Statistics: Num rows: 0 Data size: 96 Basic stats: PARTIAL Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 0 Data size: 72 Basic stats: PARTIAL Column stats: NONE
+            Statistics: Num rows: 0 Data size: 96 Basic stats: PARTIAL Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -167,9 +167,9 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: symlink_text_input_format
-            Statistics: Num rows: 0 Data size: 72 Basic stats: PARTIAL Column stats: COMPLETE
+            Statistics: Num rows: 0 Data size: 96 Basic stats: PARTIAL Column stats: COMPLETE
             Select Operator
-              Statistics: Num rows: 0 Data size: 72 Basic stats: PARTIAL Column stats: COMPLETE
+              Statistics: Num rows: 0 Data size: 96 Basic stats: PARTIAL Column stats: COMPLETE
               Group By Operator
                 aggregations: count(1)
                 mode: hash

Modified: hive/trunk/ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
Files hive/trunk/ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out (original) and hive/trunk/ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out Mon Apr 28 15:40:31 2014 differ

Modified: hive/trunk/ql/src/test/results/clientpositive/udf_current_database.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/udf_current_database.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/udf_current_database.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/udf_current_database.q.out Mon Apr 28 15:40:31 2014
@@ -20,14 +20,14 @@ STAGE PLANS:
           TableScan
             alias: _dummy_table
             Row Limit Per Split: 1
-            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
             Select Operator
               expressions: current_database() (type: string)
               outputColumnNames: _col0
-              Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+              Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
               File Output Operator
                 compressed: false
-                Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+                Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -71,14 +71,14 @@ STAGE PLANS:
           TableScan
             alias: _dummy_table
             Row Limit Per Split: 1
-            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
             Select Operator
               expressions: current_database() (type: string)
               outputColumnNames: _col0
-              Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+              Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
               File Output Operator
                 compressed: false
-                Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+                Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -118,11 +118,11 @@ STAGE PLANS:
         TableScan
           alias: _dummy_table
           Row Limit Per Split: 1
-          Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+          Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
           Select Operator
             expressions: current_database() (type: string)
             outputColumnNames: _col0
-            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
             ListSink
 
 PREHOOK: query: select current_database()
@@ -155,11 +155,11 @@ STAGE PLANS:
         TableScan
           alias: _dummy_table
           Row Limit Per Split: 1
-          Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+          Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
           Select Operator
             expressions: current_database() (type: string)
             outputColumnNames: _col0
-            Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+            Statistics: Num rows: 0 Data size: 13 Basic stats: PARTIAL Column stats: COMPLETE
             ListSink
 
 PREHOOK: query: select current_database()

Modified: hive/trunk/ql/src/test/results/clientpositive/union_remove_1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/union_remove_1.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/union_remove_1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/union_remove_1.q.out Mon Apr 28 15:40:31 2014
@@ -205,7 +205,7 @@ Table Parameters:	 	 
 	numFiles            	2                   
 	numRows             	-1                  
 	rawDataSize         	-1                  
-	totalSize           	272                 
+	totalSize           	40                  
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Modified: hive/trunk/ql/src/test/results/clientpositive/union_remove_10.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/union_remove_10.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/union_remove_10.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/union_remove_10.q.out Mon Apr 28 15:40:31 2014
@@ -268,10 +268,10 @@ Retention:          	0                  
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
 	COLUMN_STATS_ACCURATE	false               
-	numFiles            	2                   
+	numFiles            	3                   
 	numRows             	-1                  
 	rawDataSize         	-1                  
-	totalSize           	340                 
+	totalSize           	271                 
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Modified: hive/trunk/ql/src/test/results/clientpositive/union_remove_12.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/union_remove_12.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/union_remove_12.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/union_remove_12.q.out Mon Apr 28 15:40:31 2014
@@ -228,7 +228,7 @@ Table Parameters:	 	 
 	numFiles            	2                   
 	numRows             	-1                  
 	rawDataSize         	-1                  
-	totalSize           	272                 
+	totalSize           	194                 
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Modified: hive/trunk/ql/src/test/results/clientpositive/union_remove_13.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/union_remove_13.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/union_remove_13.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/union_remove_13.q.out Mon Apr 28 15:40:31 2014
@@ -251,7 +251,7 @@ Table Parameters:	 	 
 	numFiles            	2                   
 	numRows             	-1                  
 	rawDataSize         	-1                  
-	totalSize           	272                 
+	totalSize           	192                 
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Modified: hive/trunk/ql/src/test/results/clientpositive/union_remove_14.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/union_remove_14.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/union_remove_14.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/union_remove_14.q.out Mon Apr 28 15:40:31 2014
@@ -230,7 +230,7 @@ Table Parameters:	 	 
 	numFiles            	2                   
 	numRows             	-1                  
 	rawDataSize         	-1                  
-	totalSize           	272                 
+	totalSize           	194                 
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Modified: hive/trunk/ql/src/test/results/clientpositive/union_remove_19.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/union_remove_19.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/union_remove_19.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/union_remove_19.q.out Mon Apr 28 15:40:31 2014
@@ -205,7 +205,7 @@ Table Parameters:	 	 
 	numFiles            	2                   
 	numRows             	-1                  
 	rawDataSize         	-1                  
-	totalSize           	272                 
+	totalSize           	40                  
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Modified: hive/trunk/ql/src/test/results/clientpositive/union_remove_2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/union_remove_2.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/union_remove_2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/union_remove_2.q.out Mon Apr 28 15:40:31 2014
@@ -212,7 +212,7 @@ Table Parameters:	 	 
 	numFiles            	3                   
 	numRows             	-1                  
 	rawDataSize         	-1                  
-	totalSize           	408                 
+	totalSize           	68                  
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Modified: hive/trunk/ql/src/test/results/clientpositive/union_remove_20.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/union_remove_20.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/union_remove_20.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/union_remove_20.q.out Mon Apr 28 15:40:31 2014
@@ -207,7 +207,7 @@ Table Parameters:	 	 
 	numFiles            	2                   
 	numRows             	-1                  
 	rawDataSize         	-1                  
-	totalSize           	272                 
+	totalSize           	40                  
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Modified: hive/trunk/ql/src/test/results/clientpositive/union_remove_21.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/union_remove_21.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/union_remove_21.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/union_remove_21.q.out Mon Apr 28 15:40:31 2014
@@ -204,7 +204,7 @@ Table Parameters:	 	 
 	numFiles            	2                   
 	numRows             	-1                  
 	rawDataSize         	-1                  
-	totalSize           	272                 
+	totalSize           	20                  
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Modified: hive/trunk/ql/src/test/results/clientpositive/union_remove_22.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/union_remove_22.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/union_remove_22.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/union_remove_22.q.out Mon Apr 28 15:40:31 2014
@@ -208,7 +208,7 @@ Table Parameters:	 	 
 	numFiles            	2                   
 	numRows             	-1                  
 	rawDataSize         	-1                  
-	totalSize           	272                 
+	totalSize           	60                  
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Modified: hive/trunk/ql/src/test/results/clientpositive/union_remove_23.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/union_remove_23.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/union_remove_23.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/union_remove_23.q.out Mon Apr 28 15:40:31 2014
@@ -246,7 +246,7 @@ Table Parameters:	 	 
 	numFiles            	2                   
 	numRows             	-1                  
 	rawDataSize         	-1                  
-	totalSize           	272                 
+	totalSize           	40                  
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Modified: hive/trunk/ql/src/test/results/clientpositive/union_remove_24.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/union_remove_24.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/union_remove_24.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/union_remove_24.q.out Mon Apr 28 15:40:31 2014
@@ -203,7 +203,7 @@ Table Parameters:	 	 
 	numFiles            	2                   
 	numRows             	-1                  
 	rawDataSize         	-1                  
-	totalSize           	272                 
+	totalSize           	60                  
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Added: hive/trunk/ql/src/test/results/clientpositive/union_remove_25.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/union_remove_25.q.out?rev=1590681&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/union_remove_25.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/union_remove_25.q.out Mon Apr 28 15:40:31 2014
@@ -0,0 +1,667 @@
+PREHOOK: query: -- This is to test the union->selectstar->filesink optimization
+-- Union of 2 map-reduce subqueries is performed followed by select star and a file sink
+-- There is no need to write the temporary results of the sub-queries, and then read them 
+-- again to process the union. The union can be removed completely.
+-- It does not matter, whether the output is merged or not. In this case, merging is turned
+-- off
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- Since this test creates sub-directories for the output table outputTbl1, it might be easier
+-- to run the test only on hadoop 23
+
+create table inputTbl1(key string, val string) stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+POSTHOOK: query: -- This is to test the union->selectstar->filesink optimization
+-- Union of 2 map-reduce subqueries is performed followed by select star and a file sink
+-- There is no need to write the temporary results of the sub-queries, and then read them 
+-- again to process the union. The union can be removed completely.
+-- It does not matter, whether the output is merged or not. In this case, merging is turned
+-- off
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- Since this test creates sub-directories for the output table outputTbl1, it might be easier
+-- to run the test only on hadoop 23
+
+create table inputTbl1(key string, val string) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@inputTbl1
+PREHOOK: query: create table outputTbl1(key string, values bigint) partitioned by (ds string) stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+POSTHOOK: query: create table outputTbl1(key string, values bigint) partitioned by (ds string) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@outputTbl1
+PREHOOK: query: create table outputTbl2(key string, values bigint) partitioned by (ds string) stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+POSTHOOK: query: create table outputTbl2(key string, values bigint) partitioned by (ds string) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@outputTbl2
+PREHOOK: query: create table outputTbl3(key string, values bigint) partitioned by (ds string,hr string) stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+POSTHOOK: query: create table outputTbl3(key string, values bigint) partitioned by (ds string,hr string) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@outputTbl3
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@inputtbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@inputtbl1
+PREHOOK: query: explain
+insert overwrite table outputTbl1 partition(ds='2004')
+SELECT *
+FROM (
+  SELECT key, count(1) as values from inputTbl1 group by key
+  UNION ALL
+  SELECT key, count(1) as values from inputTbl1 group by key
+) a
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+insert overwrite table outputTbl1 partition(ds='2004')
+SELECT *
+FROM (
+  SELECT key, count(1) as values from inputTbl1 group by key
+  UNION ALL
+  SELECT key, count(1) as values from inputTbl1 group by key
+) a
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1, Stage-2
+  Stage-2 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: inputtbl1
+            Statistics: Num rows: 0 Data size: 30 Basic stats: PARTIAL Column stats: NONE
+            Select Operator
+              expressions: key (type: string)
+              outputColumnNames: key
+              Statistics: Num rows: 0 Data size: 30 Basic stats: PARTIAL Column stats: NONE
+              Group By Operator
+                aggregations: count(1)
+                keys: key (type: string)
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 0 Data size: 30 Basic stats: PARTIAL Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 0 Data size: 30 Basic stats: PARTIAL Column stats: NONE
+                  value expressions: _col1 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0)
+          keys: KEY._col0 (type: string)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Select Operator
+            expressions: _col0 (type: string), _col1 (type: bigint)
+            outputColumnNames: _col0, _col1
+            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2004
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.outputtbl1
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: inputtbl1
+            Statistics: Num rows: 0 Data size: 30 Basic stats: PARTIAL Column stats: NONE
+            Select Operator
+              expressions: key (type: string)
+              outputColumnNames: key
+              Statistics: Num rows: 0 Data size: 30 Basic stats: PARTIAL Column stats: NONE
+              Group By Operator
+                aggregations: count(1)
+                keys: key (type: string)
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 0 Data size: 30 Basic stats: PARTIAL Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 0 Data size: 30 Basic stats: PARTIAL Column stats: NONE
+                  value expressions: _col1 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0)
+          keys: KEY._col0 (type: string)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Select Operator
+            expressions: _col0 (type: string), _col1 (type: bigint)
+            outputColumnNames: _col0, _col1
+            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
+
+PREHOOK: query: insert overwrite table outputTbl1 partition(ds='2004')
+SELECT *
+FROM (
+  SELECT key, count(1) as values from inputTbl1 group by key
+  UNION ALL
+  SELECT key, count(1) as values from inputTbl1 group by key
+) a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@inputtbl1
+PREHOOK: Output: default@outputtbl1@ds=2004
+POSTHOOK: query: insert overwrite table outputTbl1 partition(ds='2004')
+SELECT *
+FROM (
+  SELECT key, count(1) as values from inputTbl1 group by key
+  UNION ALL
+  SELECT key, count(1) as values from inputTbl1 group by key
+) a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@inputtbl1
+POSTHOOK: Output: default@outputtbl1@ds=2004
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).values EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
+PREHOOK: query: desc formatted outputTbl1 partition(ds='2004')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@outputtbl1
+POSTHOOK: query: desc formatted outputTbl1 partition(ds='2004')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@outputtbl1
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).values EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+values              	bigint              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2004]              	 
+Database:           	default             	 
+Table:              	outputtbl1          	 
+#### A masked pattern was here ####
+Protect Mode:       	None                	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	false               
+	numFiles            	2                   
+	numRows             	-1                  
+	rawDataSize         	-1                  
+	totalSize           	40                  
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: select * from outputTbl1 order by key, values
+PREHOOK: type: QUERY
+PREHOOK: Input: default@outputtbl1
+PREHOOK: Input: default@outputtbl1@ds=2004
+#### A masked pattern was here ####
+POSTHOOK: query: select * from outputTbl1 order by key, values
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@outputtbl1
+POSTHOOK: Input: default@outputtbl1@ds=2004
+#### A masked pattern was here ####
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).values EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
+1	1	2004
+1	1	2004
+2	1	2004
+2	1	2004
+3	1	2004
+3	1	2004
+7	1	2004
+7	1	2004
+8	2	2004
+8	2	2004
+PREHOOK: query: explain 
+insert overwrite table outputTbl2 partition(ds)
+SELECT *
+FROM (
+  SELECT key, value, ds from srcpart where ds='2008-04-08' limit 500
+  UNION ALL
+  SELECT key, value, ds from srcpart where ds='2008-04-08' limit 500
+) a
+PREHOOK: type: QUERY
+POSTHOOK: query: explain 
+insert overwrite table outputTbl2 partition(ds)
+SELECT *
+FROM (
+  SELECT key, value, ds from srcpart where ds='2008-04-08' limit 500
+  UNION ALL
+  SELECT key, value, ds from srcpart where ds='2008-04-08' limit 500
+) a
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).values EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1, Stage-2
+  Stage-2 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: srcpart
+            Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: string), value (type: string), ds (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+              Limit
+                Number of rows: 500
+                Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+      Reduce Operator Tree:
+        Extract
+          Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+          Limit
+            Number of rows: 500
+            Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: _col0 (type: string), UDFToLong(_col1) (type: bigint), _col2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.outputtbl2
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.outputtbl2
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: srcpart
+            Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: string), value (type: string), ds (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+              Limit
+                Number of rows: 500
+                Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+      Reduce Operator Tree:
+        Extract
+          Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+          Limit
+            Number of rows: 500
+            Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: _col0 (type: string), UDFToLong(_col1) (type: bigint), _col2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.outputtbl2
+
+PREHOOK: query: insert overwrite table outputTbl2 partition(ds)
+SELECT *
+FROM (
+  SELECT key, value, ds from srcpart where ds='2008-04-08' limit 500
+  UNION ALL
+  SELECT key, value, ds from srcpart where ds='2008-04-08' limit 500
+) a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@outputtbl2
+POSTHOOK: query: insert overwrite table outputTbl2 partition(ds)
+SELECT *
+FROM (
+  SELECT key, value, ds from srcpart where ds='2008-04-08' limit 500
+  UNION ALL
+  SELECT key, value, ds from srcpart where ds='2008-04-08' limit 500
+) a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@outputtbl2@ds=2008-04-08
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).values EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
+POSTHOOK: Lineage: outputtbl2 PARTITION(ds=2008-04-08).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: outputtbl2 PARTITION(ds=2008-04-08).values EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: show partitions outputTbl2
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@outputtbl2
+POSTHOOK: query: show partitions outputTbl2
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@outputtbl2
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).values EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
+POSTHOOK: Lineage: outputtbl2 PARTITION(ds=2008-04-08).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: outputtbl2 PARTITION(ds=2008-04-08).values EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+ds=2008-04-08
+PREHOOK: query: desc formatted outputTbl2 partition(ds='2008-04-08')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@outputtbl2
+POSTHOOK: query: desc formatted outputTbl2 partition(ds='2008-04-08')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@outputtbl2
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).values EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
+POSTHOOK: Lineage: outputtbl2 PARTITION(ds=2008-04-08).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: outputtbl2 PARTITION(ds=2008-04-08).values EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+values              	bigint              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08]        	 
+Database:           	default             	 
+Table:              	outputtbl2          	 
+#### A masked pattern was here ####
+Protect Mode:       	None                	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	false               
+	numFiles            	2                   
+	numRows             	-1                  
+	rawDataSize         	-1                  
+	totalSize           	6812                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: explain insert overwrite table outputTbl3 partition(ds, hr)
+SELECT *
+FROM (
+  SELECT key, value, ds, hr from srcpart where ds='2008-04-08' limit 1000
+  UNION ALL
+  SELECT key, value, ds, hr from srcpart where ds='2008-04-08' limit 1000
+) a
+PREHOOK: type: QUERY
+POSTHOOK: query: explain insert overwrite table outputTbl3 partition(ds, hr)
+SELECT *
+FROM (
+  SELECT key, value, ds, hr from srcpart where ds='2008-04-08' limit 1000
+  UNION ALL
+  SELECT key, value, ds, hr from srcpart where ds='2008-04-08' limit 1000
+) a
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).values EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
+POSTHOOK: Lineage: outputtbl2 PARTITION(ds=2008-04-08).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: outputtbl2 PARTITION(ds=2008-04-08).values EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1, Stage-2
+  Stage-2 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: srcpart
+            Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: string), value (type: string), ds (type: string), hr (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+              Limit
+                Number of rows: 1000
+                Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
+      Reduce Operator Tree:
+        Extract
+          Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+          Limit
+            Number of rows: 1000
+            Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: _col0 (type: string), UDFToLong(_col1) (type: bigint), _col2 (type: string), _col3 (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.outputtbl3
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 
+            hr 
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.outputtbl3
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: srcpart
+            Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: string), value (type: string), ds (type: string), hr (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+              Limit
+                Number of rows: 1000
+                Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
+      Reduce Operator Tree:
+        Extract
+          Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+          Limit
+            Number of rows: 1000
+            Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: _col0 (type: string), UDFToLong(_col1) (type: bigint), _col2 (type: string), _col3 (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.outputtbl3
+
+PREHOOK: query: insert overwrite table outputTbl3 partition(ds, hr)
+SELECT *
+FROM (
+  SELECT key, value, ds, hr from srcpart where ds='2008-04-08' limit 1000
+  UNION ALL
+  SELECT key, value, ds, hr from srcpart where ds='2008-04-08' limit 1000
+) a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@outputtbl3
+POSTHOOK: query: insert overwrite table outputTbl3 partition(ds, hr)
+SELECT *
+FROM (
+  SELECT key, value, ds, hr from srcpart where ds='2008-04-08' limit 1000
+  UNION ALL
+  SELECT key, value, ds, hr from srcpart where ds='2008-04-08' limit 1000
+) a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@outputtbl3@ds=2008-04-08/hr=11
+POSTHOOK: Output: default@outputtbl3@ds=2008-04-08/hr=12
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).values EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
+POSTHOOK: Lineage: outputtbl2 PARTITION(ds=2008-04-08).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: outputtbl2 PARTITION(ds=2008-04-08).values EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: outputtbl3 PARTITION(ds=2008-04-08,hr=11).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: outputtbl3 PARTITION(ds=2008-04-08,hr=11).values EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: outputtbl3 PARTITION(ds=2008-04-08,hr=12).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: outputtbl3 PARTITION(ds=2008-04-08,hr=12).values EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: show partitions outputTbl3
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@outputtbl3
+POSTHOOK: query: show partitions outputTbl3
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@outputtbl3
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).values EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
+POSTHOOK: Lineage: outputtbl2 PARTITION(ds=2008-04-08).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: outputtbl2 PARTITION(ds=2008-04-08).values EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: outputtbl3 PARTITION(ds=2008-04-08,hr=11).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: outputtbl3 PARTITION(ds=2008-04-08,hr=11).values EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: outputtbl3 PARTITION(ds=2008-04-08,hr=12).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: outputtbl3 PARTITION(ds=2008-04-08,hr=12).values EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+ds=2008-04-08/hr=11
+ds=2008-04-08/hr=12
+PREHOOK: query: desc formatted outputTbl3 partition(ds='2008-04-08', hr='11')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@outputtbl3
+POSTHOOK: query: desc formatted outputTbl3 partition(ds='2008-04-08', hr='11')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@outputtbl3
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: outputtbl1 PARTITION(ds=2004).values EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
+POSTHOOK: Lineage: outputtbl2 PARTITION(ds=2008-04-08).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: outputtbl2 PARTITION(ds=2008-04-08).values EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: outputtbl3 PARTITION(ds=2008-04-08,hr=11).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: outputtbl3 PARTITION(ds=2008-04-08,hr=11).values EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: outputtbl3 PARTITION(ds=2008-04-08,hr=12).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: outputtbl3 PARTITION(ds=2008-04-08,hr=12).values EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), (srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+values              	bigint              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 11]    	 
+Database:           	default             	 
+Table:              	outputtbl3          	 
+#### A masked pattern was here ####
+Protect Mode:       	None                	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	false               
+	numFiles            	2                   
+	numRows             	-1                  
+	rawDataSize         	-1                  
+	totalSize           	6812                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   

Modified: hive/trunk/ql/src/test/results/clientpositive/union_remove_4.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/union_remove_4.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/union_remove_4.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/union_remove_4.q.out Mon Apr 28 15:40:31 2014
@@ -249,7 +249,7 @@ Table Parameters:	 	 
 	numFiles            	2                   
 	numRows             	-1                  
 	rawDataSize         	-1                  
-	totalSize           	272                 
+	totalSize           	40                  
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Modified: hive/trunk/ql/src/test/results/clientpositive/union_remove_5.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/union_remove_5.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/union_remove_5.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/union_remove_5.q.out Mon Apr 28 15:40:31 2014
@@ -258,7 +258,7 @@ Table Parameters:	 	 
 	numFiles            	3                   
 	numRows             	-1                  
 	rawDataSize         	-1                  
-	totalSize           	408                 
+	totalSize           	68                  
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Modified: hive/trunk/ql/src/test/results/clientpositive/union_remove_7.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/union_remove_7.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/union_remove_7.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/union_remove_7.q.out Mon Apr 28 15:40:31 2014
@@ -209,7 +209,7 @@ Table Parameters:	 	 
 	numFiles            	2                   
 	numRows             	-1                  
 	rawDataSize         	-1                  
-	totalSize           	272                 
+	totalSize           	178                 
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Modified: hive/trunk/ql/src/test/results/clientpositive/union_remove_8.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/union_remove_8.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/union_remove_8.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/union_remove_8.q.out Mon Apr 28 15:40:31 2014
@@ -216,7 +216,7 @@ Table Parameters:	 	 
 	numFiles            	3                   
 	numRows             	-1                  
 	rawDataSize         	-1                  
-	totalSize           	408                 
+	totalSize           	271                 
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Modified: hive/trunk/ql/src/test/results/clientpositive/union_remove_9.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/union_remove_9.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/union_remove_9.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/union_remove_9.q.out Mon Apr 28 15:40:31 2014
@@ -255,7 +255,7 @@ Table Parameters:	 	 
 	numFiles            	2                   
 	numRows             	-1                  
 	rawDataSize         	-1                  
-	totalSize           	272                 
+	totalSize           	192                 
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

Modified: hive/trunk/ql/src/test/results/clientpositive/unset_table_view_property.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/unset_table_view_property.q.out?rev=1590681&r1=1590680&r2=1590681&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/unset_table_view_property.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/unset_table_view_property.q.out Mon Apr 28 15:40:31 2014
@@ -29,10 +29,10 @@ numFiles	0
 c	3
 #### A masked pattern was here ####
 a	1
-COLUMN_STATS_ACCURATE	false
 #### A masked pattern was here ####
-numRows	-1
+COLUMN_STATS_ACCURATE	false
 totalSize	0
+numRows	-1
 rawDataSize	-1
 PREHOOK: query: -- UNSET all the properties
 ALTER TABLE testTable UNSET TBLPROPERTIES ('a', 'c')
@@ -51,9 +51,8 @@ POSTHOOK: type: SHOW_TBLPROPERTIES
 numFiles	0
 #### A masked pattern was here ####
 COLUMN_STATS_ACCURATE	false
-#### A masked pattern was here ####
-numRows	-1
 totalSize	0
+numRows	-1
 rawDataSize	-1
 PREHOOK: query: ALTER TABLE testTable SET TBLPROPERTIES ('a'='1', 'c'='3', 'd'='4')
 PREHOOK: type: ALTERTABLE_PROPERTIES
@@ -67,16 +66,16 @@ PREHOOK: query: SHOW TBLPROPERTIES testT
 PREHOOK: type: SHOW_TBLPROPERTIES
 POSTHOOK: query: SHOW TBLPROPERTIES testTable
 POSTHOOK: type: SHOW_TBLPROPERTIES
-d	4
 numFiles	0
+d	4
 #### A masked pattern was here ####
 c	3
 #### A masked pattern was here ####
 a	1
-COLUMN_STATS_ACCURATE	false
 #### A masked pattern was here ####
-numRows	-1
+COLUMN_STATS_ACCURATE	false
 totalSize	0
+numRows	-1
 rawDataSize	-1
 PREHOOK: query: -- UNSET a subset of the properties
 ALTER TABLE testTable UNSET TBLPROPERTIES ('a', 'd')
@@ -97,9 +96,8 @@ numFiles	0
 c	3
 #### A masked pattern was here ####
 COLUMN_STATS_ACCURATE	false
-#### A masked pattern was here ####
-numRows	-1
 totalSize	0
+numRows	-1
 rawDataSize	-1
 PREHOOK: query: -- the same property being UNSET multiple times
 ALTER TABLE testTable UNSET TBLPROPERTIES ('c', 'c', 'c')
@@ -118,9 +116,8 @@ POSTHOOK: type: SHOW_TBLPROPERTIES
 numFiles	0
 #### A masked pattern was here ####
 COLUMN_STATS_ACCURATE	false
-#### A masked pattern was here ####
-numRows	-1
 totalSize	0
+numRows	-1
 rawDataSize	-1
 PREHOOK: query: ALTER TABLE testTable SET TBLPROPERTIES ('a'='1', 'b' = '2', 'c'='3', 'd'='4')
 PREHOOK: type: ALTERTABLE_PROPERTIES
@@ -134,17 +131,17 @@ PREHOOK: query: SHOW TBLPROPERTIES testT
 PREHOOK: type: SHOW_TBLPROPERTIES
 POSTHOOK: query: SHOW TBLPROPERTIES testTable
 POSTHOOK: type: SHOW_TBLPROPERTIES
-d	4
 numFiles	0
+d	4
 #### A masked pattern was here ####
 b	2
 c	3
 #### A masked pattern was here ####
 a	1
-COLUMN_STATS_ACCURATE	false
 #### A masked pattern was here ####
-numRows	-1
+COLUMN_STATS_ACCURATE	false
 totalSize	0
+numRows	-1
 rawDataSize	-1
 PREHOOK: query: -- UNSET a subset of the properties and some non-existed properties using IF EXISTS
 ALTER TABLE testTable UNSET TBLPROPERTIES IF EXISTS ('b', 'd', 'b', 'f')
@@ -165,10 +162,10 @@ numFiles	0
 c	3
 #### A masked pattern was here ####
 a	1
-COLUMN_STATS_ACCURATE	false
 #### A masked pattern was here ####
-numRows	-1
+COLUMN_STATS_ACCURATE	false
 totalSize	0
+numRows	-1
 rawDataSize	-1
 PREHOOK: query: -- UNSET a subset of the properties and some non-existed properties using IF EXISTS
 ALTER TABLE testTable UNSET TBLPROPERTIES IF EXISTS ('b', 'd', 'c', 'f', 'x', 'y', 'z')
@@ -187,10 +184,10 @@ POSTHOOK: type: SHOW_TBLPROPERTIES
 numFiles	0
 #### A masked pattern was here ####
 a	1
-COLUMN_STATS_ACCURATE	false
 #### A masked pattern was here ####
-numRows	-1
+COLUMN_STATS_ACCURATE	false
 totalSize	0
+numRows	-1
 rawDataSize	-1
 PREHOOK: query: -- UNSET VIEW PROPERTIES
 CREATE VIEW testView AS SELECT value FROM src WHERE key=86