Posted to commits@hive.apache.org by xu...@apache.org on 2015/11/30 01:11:56 UTC

[59/91] [abbrv] hive git commit: HIVE-12411: Remove counter based stats collection mechanism (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

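The deleted golden files below exercised Hive's basic table and partition statistics flow (ANALYZE TABLE ... COMPUTE STATISTICS followed by DESCRIBE FORMATTED). For orientation, a minimal sketch of that flow is shown here, assuming a default Hive session with filesystem-based stats (hive.stats.dbclass=fs, the path that remains after the counter mechanism is removed) and the kv1.txt sample file from the test data directory; the SET lines are illustrative and not part of the removed tests.

    -- assumption: fs-based stats, since counter-based collection is removed by HIVE-12411
    SET hive.stats.dbclass=fs;
    SET hive.stats.autogather=true;   -- gather basic stats automatically on INSERT

    -- create and populate a partitioned table with the sample key/value data
    CREATE TABLE dummy (key STRING, value STRING) PARTITIONED BY (ds STRING, hr STRING);
    LOAD DATA LOCAL INPATH '../../data/files/kv1.txt'
      INTO TABLE dummy PARTITION (ds='2008', hr='12');

    -- collect numFiles / numRows / rawDataSize / totalSize for all partitions
    ANALYZE TABLE dummy PARTITION (ds, hr) COMPUTE STATISTICS;

    -- inspect the collected stats in the partition parameters
    DESCRIBE FORMATTED dummy PARTITION (ds='2008', hr='12');

The expected output of those statements is what the deleted .q.out files below recorded.
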
http://git-wip-us.apache.org/repos/asf/hive/blob/ab98ffc2/ql/src/test/results/clientpositive/stats_counter_partitioned.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/stats_counter_partitioned.q.out b/ql/src/test/results/clientpositive/stats_counter_partitioned.q.out
deleted file mode 100644
index 626dcff..0000000
--- a/ql/src/test/results/clientpositive/stats_counter_partitioned.q.out
+++ /dev/null
@@ -1,465 +0,0 @@
-PREHOOK: query: -- partitioned table analyze 
-
-create table dummy (key string, value string) partitioned by (ds string, hr string)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@dummy
-POSTHOOK: query: -- partitioned table analyze 
-
-create table dummy (key string, value string) partitioned by (ds string, hr string)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@dummy
-PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table dummy partition (ds='2008',hr='12')
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@dummy
-POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table dummy partition (ds='2008',hr='12')
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@dummy
-POSTHOOK: Output: default@dummy@ds=2008/hr=12
-PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table dummy partition (ds='2008',hr='11')
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@dummy
-POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table dummy partition (ds='2008',hr='11')
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@dummy
-POSTHOOK: Output: default@dummy@ds=2008/hr=11
-PREHOOK: query: analyze table dummy partition (ds,hr) compute statistics
-PREHOOK: type: QUERY
-PREHOOK: Input: default@dummy
-PREHOOK: Input: default@dummy@ds=2008/hr=11
-PREHOOK: Input: default@dummy@ds=2008/hr=12
-PREHOOK: Output: default@dummy
-PREHOOK: Output: default@dummy@ds=2008/hr=11
-PREHOOK: Output: default@dummy@ds=2008/hr=12
-POSTHOOK: query: analyze table dummy partition (ds,hr) compute statistics
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@dummy
-POSTHOOK: Input: default@dummy@ds=2008/hr=11
-POSTHOOK: Input: default@dummy@ds=2008/hr=12
-POSTHOOK: Output: default@dummy
-POSTHOOK: Output: default@dummy@ds=2008/hr=11
-POSTHOOK: Output: default@dummy@ds=2008/hr=12
-PREHOOK: query: describe formatted dummy partition (ds='2008', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@dummy
-POSTHOOK: query: describe formatted dummy partition (ds='2008', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@dummy
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008, 11]          	 
-Database:           	default             	 
-Table:              	dummy               	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	500                 
-	rawDataSize         	5312                
-	totalSize           	5812                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: describe formatted dummy partition (ds='2008', hr='12')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@dummy
-POSTHOOK: query: describe formatted dummy partition (ds='2008', hr='12')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@dummy
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008, 12]          	 
-Database:           	default             	 
-Table:              	dummy               	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	500                 
-	rawDataSize         	5312                
-	totalSize           	5812                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: drop table dummy
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@dummy
-PREHOOK: Output: default@dummy
-POSTHOOK: query: drop table dummy
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@dummy
-POSTHOOK: Output: default@dummy
-PREHOOK: query: -- static partitioned table on insert
-
-create table dummy (key string, value string) partitioned by (ds string, hr string)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@dummy
-POSTHOOK: query: -- static partitioned table on insert
-
-create table dummy (key string, value string) partitioned by (ds string, hr string)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@dummy
-PREHOOK: query: insert overwrite table dummy partition (ds='10',hr='11') select * from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@dummy@ds=10/hr=11
-POSTHOOK: query: insert overwrite table dummy partition (ds='10',hr='11') select * from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@dummy@ds=10/hr=11
-POSTHOOK: Lineage: dummy PARTITION(ds=10,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: dummy PARTITION(ds=10,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: insert overwrite table dummy partition (ds='10',hr='12') select * from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@dummy@ds=10/hr=12
-POSTHOOK: query: insert overwrite table dummy partition (ds='10',hr='12') select * from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@dummy@ds=10/hr=12
-POSTHOOK: Lineage: dummy PARTITION(ds=10,hr=12).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: dummy PARTITION(ds=10,hr=12).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: describe formatted dummy partition (ds='10', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@dummy
-POSTHOOK: query: describe formatted dummy partition (ds='10', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@dummy
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[10, 11]            	 
-Database:           	default             	 
-Table:              	dummy               	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	500                 
-	rawDataSize         	5312                
-	totalSize           	5812                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: describe formatted dummy partition (ds='10', hr='12')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@dummy
-POSTHOOK: query: describe formatted dummy partition (ds='10', hr='12')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@dummy
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[10, 12]            	 
-Database:           	default             	 
-Table:              	dummy               	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	500                 
-	rawDataSize         	5312                
-	totalSize           	5812                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: drop table dummy
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@dummy
-PREHOOK: Output: default@dummy
-POSTHOOK: query: drop table dummy
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@dummy
-POSTHOOK: Output: default@dummy
-PREHOOK: query: -- dynamic partitioned table on insert
-
-create table dummy (key int) partitioned by (hr int)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@dummy
-POSTHOOK: query: -- dynamic partitioned table on insert
-
-create table dummy (key int) partitioned by (hr int)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@dummy
-PREHOOK: query: CREATE TABLE tbl(key int, value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@tbl
-POSTHOOK: query: CREATE TABLE tbl(key int, value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@tbl
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/tbl.txt' OVERWRITE INTO TABLE tbl
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@tbl
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/tbl.txt' OVERWRITE INTO TABLE tbl
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@tbl
-PREHOOK: query: insert overwrite table dummy partition (hr) select * from tbl
-PREHOOK: type: QUERY
-PREHOOK: Input: default@tbl
-PREHOOK: Output: default@dummy
-POSTHOOK: query: insert overwrite table dummy partition (hr) select * from tbl
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@tbl
-POSTHOOK: Output: default@dummy@hr=1994
-POSTHOOK: Output: default@dummy@hr=1996
-POSTHOOK: Output: default@dummy@hr=1997
-POSTHOOK: Output: default@dummy@hr=1998
-POSTHOOK: Lineage: dummy PARTITION(hr=1994).key SIMPLE [(tbl)tbl.FieldSchema(name:key, type:int, comment:null), ]
-POSTHOOK: Lineage: dummy PARTITION(hr=1996).key SIMPLE [(tbl)tbl.FieldSchema(name:key, type:int, comment:null), ]
-POSTHOOK: Lineage: dummy PARTITION(hr=1997).key SIMPLE [(tbl)tbl.FieldSchema(name:key, type:int, comment:null), ]
-POSTHOOK: Lineage: dummy PARTITION(hr=1998).key SIMPLE [(tbl)tbl.FieldSchema(name:key, type:int, comment:null), ]
-PREHOOK: query: describe formatted dummy partition (hr=1997)
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@dummy
-POSTHOOK: query: describe formatted dummy partition (hr=1997)
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@dummy
-# col_name            	data_type           	comment             
-	 	 
-key                 	int                 	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-hr                  	int                 	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[1997]              	 
-Database:           	default             	 
-Table:              	dummy               	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	6                   
-	rawDataSize         	6                   
-	totalSize           	12                  
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: describe formatted dummy partition (hr=1994)
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@dummy
-POSTHOOK: query: describe formatted dummy partition (hr=1994)
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@dummy
-# col_name            	data_type           	comment             
-	 	 
-key                 	int                 	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-hr                  	int                 	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[1994]              	 
-Database:           	default             	 
-Table:              	dummy               	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	1                   
-	rawDataSize         	1                   
-	totalSize           	2                   
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: describe formatted dummy partition (hr=1998)
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@dummy
-POSTHOOK: query: describe formatted dummy partition (hr=1998)
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@dummy
-# col_name            	data_type           	comment             
-	 	 
-key                 	int                 	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-hr                  	int                 	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[1998]              	 
-Database:           	default             	 
-Table:              	dummy               	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	2                   
-	rawDataSize         	2                   
-	totalSize           	4                   
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: describe formatted dummy partition (hr=1996)
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@dummy
-POSTHOOK: query: describe formatted dummy partition (hr=1996)
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@dummy
-# col_name            	data_type           	comment             
-	 	 
-key                 	int                 	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-hr                  	int                 	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[1996]              	 
-Database:           	default             	 
-Table:              	dummy               	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	1                   
-	rawDataSize         	1                   
-	totalSize           	2                   
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: drop table tbl
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@tbl
-PREHOOK: Output: default@tbl
-POSTHOOK: query: drop table tbl
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@tbl
-POSTHOOK: Output: default@tbl
-PREHOOK: query: drop table dummy
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@dummy
-PREHOOK: Output: default@dummy
-POSTHOOK: query: drop table dummy
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@dummy
-POSTHOOK: Output: default@dummy

http://git-wip-us.apache.org/repos/asf/hive/blob/ab98ffc2/ql/src/test/results/clientpositive/tez/metadataonly1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/metadataonly1.q.out b/ql/src/test/results/clientpositive/tez/metadataonly1.q.out
index 1fb166b..4ef71f8 100644
--- a/ql/src/test/results/clientpositive/tez/metadataonly1.q.out
+++ b/ql/src/test/results/clientpositive/tez/metadataonly1.q.out
@@ -170,9 +170,9 @@ STAGE PLANS:
                         value expressions: _col0 (type: string)
                         auto parallelism: false
             Path -> Alias:
-              -mr-10003default.test1{ds=1} [test1]
+              -mr-10004default.test1{ds=1} [test1]
             Path -> Partition:
-              -mr-10003default.test1{ds=1} 
+              -mr-10004default.test1{ds=1} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -212,7 +212,7 @@ STAGE PLANS:
                     name: default.test1
                   name: default.test1
             Truncated Path -> Alias:
-              -mr-10003default.test1{ds=1} [test1]
+              -mr-10004default.test1{ds=1} [test1]
         Reducer 2 
             Needs Tagging: false
             Reduce Operator Tree:
@@ -318,9 +318,9 @@ STAGE PLANS:
                         tag: -1
                         auto parallelism: false
             Path -> Alias:
-              -mr-10003default.test1{ds=1} [test1]
+              -mr-10004default.test1{ds=1} [test1]
             Path -> Partition:
-              -mr-10003default.test1{ds=1} 
+              -mr-10004default.test1{ds=1} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -360,7 +360,7 @@ STAGE PLANS:
                     name: default.test1
                   name: default.test1
             Truncated Path -> Alias:
-              -mr-10003default.test1{ds=1} [test1]
+              -mr-10004default.test1{ds=1} [test1]
         Reducer 2 
             Needs Tagging: false
             Reduce Operator Tree:
@@ -1038,11 +1038,11 @@ STAGE PLANS:
                         tag: -1
                         auto parallelism: true
             Path -> Alias:
-              -mr-10003default.test2{ds=1, hr=1} [test2]
-              -mr-10004default.test2{ds=1, hr=2} [test2]
-              -mr-10005default.test2{ds=1, hr=3} [test2]
+              -mr-10004default.test2{ds=1, hr=1} [test2]
+              -mr-10005default.test2{ds=1, hr=2} [test2]
+              -mr-10006default.test2{ds=1, hr=3} [test2]
             Path -> Partition:
-              -mr-10003default.test2{ds=1, hr=1} 
+              -mr-10004default.test2{ds=1, hr=1} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1082,7 +1082,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.test2
                   name: default.test2
-              -mr-10004default.test2{ds=1, hr=2} 
+              -mr-10005default.test2{ds=1, hr=2} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1122,7 +1122,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.test2
                   name: default.test2
-              -mr-10005default.test2{ds=1, hr=3} 
+              -mr-10006default.test2{ds=1, hr=3} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1163,9 +1163,9 @@ STAGE PLANS:
                     name: default.test2
                   name: default.test2
             Truncated Path -> Alias:
-              -mr-10003default.test2{ds=1, hr=1} [test2]
-              -mr-10004default.test2{ds=1, hr=2} [test2]
-              -mr-10005default.test2{ds=1, hr=3} [test2]
+              -mr-10004default.test2{ds=1, hr=1} [test2]
+              -mr-10005default.test2{ds=1, hr=2} [test2]
+              -mr-10006default.test2{ds=1, hr=3} [test2]
         Reducer 2 
             Needs Tagging: false
             Reduce Operator Tree:
@@ -1521,10 +1521,10 @@ STAGE PLANS:
                         value expressions: _col0 (type: string)
                         auto parallelism: false
             Path -> Alias:
-              -mr-10003default.test1{ds=1} [test1]
-              -mr-10004default.test1{ds=2} [test1]
+              -mr-10004default.test1{ds=1} [test1]
+              -mr-10005default.test1{ds=2} [test1]
             Path -> Partition:
-              -mr-10003default.test1{ds=1} 
+              -mr-10004default.test1{ds=1} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1563,7 +1563,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.test1
                   name: default.test1
-              -mr-10004default.test1{ds=2} 
+              -mr-10005default.test1{ds=2} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1603,8 +1603,8 @@ STAGE PLANS:
                     name: default.test1
                   name: default.test1
             Truncated Path -> Alias:
-              -mr-10003default.test1{ds=1} [test1]
-              -mr-10004default.test1{ds=2} [test1]
+              -mr-10004default.test1{ds=1} [test1]
+              -mr-10005default.test1{ds=2} [test1]
         Reducer 2 
             Needs Tagging: false
             Reduce Operator Tree:
@@ -1770,13 +1770,13 @@ STAGE PLANS:
                         tag: -1
                         auto parallelism: true
             Path -> Alias:
-              -mr-10003default.test2{ds=01_10_10, hr=01} [test2]
-              -mr-10004default.test2{ds=01_10_20, hr=02} [test2]
-              -mr-10005default.test2{ds=1, hr=1} [test2]
-              -mr-10006default.test2{ds=1, hr=2} [test2]
-              -mr-10007default.test2{ds=1, hr=3} [test2]
+              -mr-10004default.test2{ds=01_10_10, hr=01} [test2]
+              -mr-10005default.test2{ds=01_10_20, hr=02} [test2]
+              -mr-10006default.test2{ds=1, hr=1} [test2]
+              -mr-10007default.test2{ds=1, hr=2} [test2]
+              -mr-10008default.test2{ds=1, hr=3} [test2]
             Path -> Partition:
-              -mr-10003default.test2{ds=01_10_10, hr=01} 
+              -mr-10004default.test2{ds=01_10_10, hr=01} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1816,7 +1816,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.test2
                   name: default.test2
-              -mr-10004default.test2{ds=01_10_20, hr=02} 
+              -mr-10005default.test2{ds=01_10_20, hr=02} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1856,7 +1856,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.test2
                   name: default.test2
-              -mr-10005default.test2{ds=1, hr=1} 
+              -mr-10006default.test2{ds=1, hr=1} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1896,7 +1896,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.test2
                   name: default.test2
-              -mr-10006default.test2{ds=1, hr=2} 
+              -mr-10007default.test2{ds=1, hr=2} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1936,7 +1936,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.test2
                   name: default.test2
-              -mr-10007default.test2{ds=1, hr=3} 
+              -mr-10008default.test2{ds=1, hr=3} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1977,11 +1977,11 @@ STAGE PLANS:
                     name: default.test2
                   name: default.test2
             Truncated Path -> Alias:
-              -mr-10003default.test2{ds=01_10_10, hr=01} [test2]
-              -mr-10004default.test2{ds=01_10_20, hr=02} [test2]
-              -mr-10005default.test2{ds=1, hr=1} [test2]
-              -mr-10006default.test2{ds=1, hr=2} [test2]
-              -mr-10007default.test2{ds=1, hr=3} [test2]
+              -mr-10004default.test2{ds=01_10_10, hr=01} [test2]
+              -mr-10005default.test2{ds=01_10_20, hr=02} [test2]
+              -mr-10006default.test2{ds=1, hr=1} [test2]
+              -mr-10007default.test2{ds=1, hr=2} [test2]
+              -mr-10008default.test2{ds=1, hr=3} [test2]
         Reducer 2 
             Needs Tagging: false
             Reduce Operator Tree:

http://git-wip-us.apache.org/repos/asf/hive/blob/ab98ffc2/ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out b/ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out
index bf9ba9b..f907ed7 100644
--- a/ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out
+++ b/ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out
@@ -274,9 +274,9 @@ STAGE PLANS:
                         tag: 0
                         auto parallelism: true
             Path -> Alias:
-              -mr-10002default.src{} [src]
+              -mr-10003default.src{} [src]
             Path -> Partition:
-              -mr-10002default.src{} 
+              -mr-10003default.src{} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -320,7 +320,7 @@ STAGE PLANS:
                     name: default.src
                   name: default.src
             Truncated Path -> Alias:
-              -mr-10002default.src{} [src]
+              -mr-10003default.src{} [src]
         Map 3 
             Map Operator Tree:
                 TableScan
@@ -341,12 +341,12 @@ STAGE PLANS:
                         value expressions: _col0 (type: string)
                         auto parallelism: false
             Path -> Alias:
-              -mr-10003default.srcpart{ds=2008-04-08, hr=11} [srcpart]
-              -mr-10004default.srcpart{ds=2008-04-08, hr=12} [srcpart]
-              -mr-10005default.srcpart{ds=2008-04-09, hr=11} [srcpart]
-              -mr-10006default.srcpart{ds=2008-04-09, hr=12} [srcpart]
+              -mr-10004default.srcpart{ds=2008-04-08, hr=11} [srcpart]
+              -mr-10005default.srcpart{ds=2008-04-08, hr=12} [srcpart]
+              -mr-10006default.srcpart{ds=2008-04-09, hr=11} [srcpart]
+              -mr-10007default.srcpart{ds=2008-04-09, hr=12} [srcpart]
             Path -> Partition:
-              -mr-10003default.srcpart{ds=2008-04-08, hr=11} 
+              -mr-10004default.srcpart{ds=2008-04-08, hr=11} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -391,7 +391,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.srcpart
                   name: default.srcpart
-              -mr-10004default.srcpart{ds=2008-04-08, hr=12} 
+              -mr-10005default.srcpart{ds=2008-04-08, hr=12} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -436,7 +436,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.srcpart
                   name: default.srcpart
-              -mr-10005default.srcpart{ds=2008-04-09, hr=11} 
+              -mr-10006default.srcpart{ds=2008-04-09, hr=11} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -481,7 +481,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.srcpart
                   name: default.srcpart
-              -mr-10006default.srcpart{ds=2008-04-09, hr=12} 
+              -mr-10007default.srcpart{ds=2008-04-09, hr=12} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -527,10 +527,10 @@ STAGE PLANS:
                     name: default.srcpart
                   name: default.srcpart
             Truncated Path -> Alias:
-              -mr-10003default.srcpart{ds=2008-04-08, hr=11} [srcpart]
-              -mr-10004default.srcpart{ds=2008-04-08, hr=12} [srcpart]
-              -mr-10005default.srcpart{ds=2008-04-09, hr=11} [srcpart]
-              -mr-10006default.srcpart{ds=2008-04-09, hr=12} [srcpart]
+              -mr-10004default.srcpart{ds=2008-04-08, hr=11} [srcpart]
+              -mr-10005default.srcpart{ds=2008-04-08, hr=12} [srcpart]
+              -mr-10006default.srcpart{ds=2008-04-09, hr=11} [srcpart]
+              -mr-10007default.srcpart{ds=2008-04-09, hr=12} [srcpart]
         Reducer 2 
             Needs Tagging: false
             Reduce Operator Tree:
@@ -695,9 +695,9 @@ STAGE PLANS:
                         value expressions: _col0 (type: bigint)
                         auto parallelism: false
             Path -> Alias:
-              -mr-10002default.src{} [src]
+              -mr-10003default.src{} [src]
             Path -> Partition:
-              -mr-10002default.src{} 
+              -mr-10003default.src{} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -741,7 +741,7 @@ STAGE PLANS:
                     name: default.src
                   name: default.src
             Truncated Path -> Alias:
-              -mr-10002default.src{} [src]
+              -mr-10003default.src{} [src]
         Map 4 
             Map Operator Tree:
                 TableScan
@@ -1132,9 +1132,9 @@ STAGE PLANS:
                         value expressions: _col0 (type: string)
                         auto parallelism: false
             Path -> Alias:
-              -mr-10002default.src{} [src]
+              -mr-10003default.src{} [src]
             Path -> Partition:
-              -mr-10002default.src{} 
+              -mr-10003default.src{} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1178,7 +1178,7 @@ STAGE PLANS:
                     name: default.src
                   name: default.src
             Truncated Path -> Alias:
-              -mr-10002default.src{} [src]
+              -mr-10003default.src{} [src]
         Map 3 
             Map Operator Tree:
                 TableScan
@@ -1199,12 +1199,12 @@ STAGE PLANS:
                         value expressions: _col0 (type: string)
                         auto parallelism: false
             Path -> Alias:
-              -mr-10003default.srcpart{ds=2008-04-08, hr=11} [srcpart]
-              -mr-10004default.srcpart{ds=2008-04-08, hr=12} [srcpart]
-              -mr-10005default.srcpart{ds=2008-04-09, hr=11} [srcpart]
-              -mr-10006default.srcpart{ds=2008-04-09, hr=12} [srcpart]
+              -mr-10004default.srcpart{ds=2008-04-08, hr=11} [srcpart]
+              -mr-10005default.srcpart{ds=2008-04-08, hr=12} [srcpart]
+              -mr-10006default.srcpart{ds=2008-04-09, hr=11} [srcpart]
+              -mr-10007default.srcpart{ds=2008-04-09, hr=12} [srcpart]
             Path -> Partition:
-              -mr-10003default.srcpart{ds=2008-04-08, hr=11} 
+              -mr-10004default.srcpart{ds=2008-04-08, hr=11} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1249,7 +1249,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.srcpart
                   name: default.srcpart
-              -mr-10004default.srcpart{ds=2008-04-08, hr=12} 
+              -mr-10005default.srcpart{ds=2008-04-08, hr=12} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1294,7 +1294,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.srcpart
                   name: default.srcpart
-              -mr-10005default.srcpart{ds=2008-04-09, hr=11} 
+              -mr-10006default.srcpart{ds=2008-04-09, hr=11} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1339,7 +1339,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.srcpart
                   name: default.srcpart
-              -mr-10006default.srcpart{ds=2008-04-09, hr=12} 
+              -mr-10007default.srcpart{ds=2008-04-09, hr=12} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1385,10 +1385,10 @@ STAGE PLANS:
                     name: default.srcpart
                   name: default.srcpart
             Truncated Path -> Alias:
-              -mr-10003default.srcpart{ds=2008-04-08, hr=11} [srcpart]
-              -mr-10004default.srcpart{ds=2008-04-08, hr=12} [srcpart]
-              -mr-10005default.srcpart{ds=2008-04-09, hr=11} [srcpart]
-              -mr-10006default.srcpart{ds=2008-04-09, hr=12} [srcpart]
+              -mr-10004default.srcpart{ds=2008-04-08, hr=11} [srcpart]
+              -mr-10005default.srcpart{ds=2008-04-08, hr=12} [srcpart]
+              -mr-10006default.srcpart{ds=2008-04-09, hr=11} [srcpart]
+              -mr-10007default.srcpart{ds=2008-04-09, hr=12} [srcpart]
         Reducer 2 
             Needs Tagging: false
             Reduce Operator Tree:
@@ -1645,9 +1645,9 @@ STAGE PLANS:
                       tag: 0
                       auto parallelism: true
             Path -> Alias:
-              -mr-10002default.src{} [src]
+              -mr-10003default.src{} [src]
             Path -> Partition:
-              -mr-10002default.src{} 
+              -mr-10003default.src{} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1691,7 +1691,7 @@ STAGE PLANS:
                     name: default.src
                   name: default.src
             Truncated Path -> Alias:
-              -mr-10002default.src{} [src]
+              -mr-10003default.src{} [src]
         Map 5 
             Map Operator Tree:
                 TableScan
@@ -1832,9 +1832,9 @@ STAGE PLANS:
                       value expressions: key (type: string)
                       auto parallelism: true
             Path -> Alias:
-              -mr-10002default.src{} [s1]
+              -mr-10003default.src{} [s1]
             Path -> Partition:
-              -mr-10002default.src{} 
+              -mr-10003default.src{} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1878,7 +1878,7 @@ STAGE PLANS:
                     name: default.src
                   name: default.src
             Truncated Path -> Alias:
-              -mr-10002default.src{} [s1]
+              -mr-10003default.src{} [s1]
         Map 3 
             Map Operator Tree:
                 TableScan
@@ -1898,9 +1898,9 @@ STAGE PLANS:
                       value expressions: key (type: string)
                       auto parallelism: true
             Path -> Alias:
-              -mr-10003default.src{} [s2]
+              -mr-10004default.src{} [s2]
             Path -> Partition:
-              -mr-10003default.src{} 
+              -mr-10004default.src{} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1944,7 +1944,7 @@ STAGE PLANS:
                     name: default.src
                   name: default.src
             Truncated Path -> Alias:
-              -mr-10003default.src{} [s2]
+              -mr-10004default.src{} [s2]
         Reducer 2 
             Needs Tagging: false
             Reduce Operator Tree:
@@ -2059,9 +2059,9 @@ STAGE PLANS:
                         value expressions: _col0 (type: bigint)
                         auto parallelism: false
             Path -> Alias:
-              -mr-10002default.src{} [src]
+              -mr-10003default.src{} [src]
             Path -> Partition:
-              -mr-10002default.src{} 
+              -mr-10003default.src{} 
                 Partition
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -2105,7 +2105,7 @@ STAGE PLANS:
                     name: default.src
                   name: default.src
             Truncated Path -> Alias:
-              -mr-10002default.src{} [src]
+              -mr-10003default.src{} [src]
         Reducer 2 
             Needs Tagging: false
             Reduce Operator Tree:

http://git-wip-us.apache.org/repos/asf/hive/blob/ab98ffc2/ql/src/test/results/clientpositive/tez/stats_counter.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/stats_counter.q.out b/ql/src/test/results/clientpositive/tez/stats_counter.q.out
deleted file mode 100644
index 8b3dcea..0000000
--- a/ql/src/test/results/clientpositive/tez/stats_counter.q.out
+++ /dev/null
@@ -1,102 +0,0 @@
-PREHOOK: query: -- by analyze
-create table dummy1 as select * from src
-PREHOOK: type: CREATETABLE_AS_SELECT
-PREHOOK: Input: default@src
-PREHOOK: Output: database:default
-PREHOOK: Output: default@dummy1
-POSTHOOK: query: -- by analyze
-create table dummy1 as select * from src
-POSTHOOK: type: CREATETABLE_AS_SELECT
-POSTHOOK: Input: default@src
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@dummy1
-PREHOOK: query: analyze table dummy1 compute statistics
-PREHOOK: type: QUERY
-PREHOOK: Input: default@dummy1
-PREHOOK: Output: default@dummy1
-POSTHOOK: query: analyze table dummy1 compute statistics
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@dummy1
-POSTHOOK: Output: default@dummy1
-PREHOOK: query: desc formatted dummy1
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@dummy1
-POSTHOOK: query: desc formatted dummy1
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@dummy1
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Detailed Table Information	 	 
-Database:           	default             	 
-#### A masked pattern was here ####
-Retention:          	0                   	 
-#### A masked pattern was here ####
-Table Type:         	MANAGED_TABLE       	 
-Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	500                 
-	rawDataSize         	5312                
-	totalSize           	5812                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: -- by autogather
-create table dummy2 as select * from src
-PREHOOK: type: CREATETABLE_AS_SELECT
-PREHOOK: Input: default@src
-PREHOOK: Output: database:default
-PREHOOK: Output: default@dummy2
-POSTHOOK: query: -- by autogather
-create table dummy2 as select * from src
-POSTHOOK: type: CREATETABLE_AS_SELECT
-POSTHOOK: Input: default@src
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@dummy2
-PREHOOK: query: desc formatted dummy2
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@dummy2
-POSTHOOK: query: desc formatted dummy2
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@dummy2
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Detailed Table Information	 	 
-Database:           	default             	 
-#### A masked pattern was here ####
-Retention:          	0                   	 
-#### A masked pattern was here ####
-Table Type:         	MANAGED_TABLE       	 
-Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	500                 
-	rawDataSize         	5312                
-	totalSize           	5812                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   

http://git-wip-us.apache.org/repos/asf/hive/blob/ab98ffc2/ql/src/test/results/clientpositive/tez/stats_counter_partitioned.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/stats_counter_partitioned.q.out b/ql/src/test/results/clientpositive/tez/stats_counter_partitioned.q.out
deleted file mode 100644
index 626dcff..0000000
--- a/ql/src/test/results/clientpositive/tez/stats_counter_partitioned.q.out
+++ /dev/null
@@ -1,465 +0,0 @@
-PREHOOK: query: -- partitioned table analyze 
-
-create table dummy (key string, value string) partitioned by (ds string, hr string)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@dummy
-POSTHOOK: query: -- partitioned table analyze 
-
-create table dummy (key string, value string) partitioned by (ds string, hr string)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@dummy
-PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table dummy partition (ds='2008',hr='12')
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@dummy
-POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table dummy partition (ds='2008',hr='12')
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@dummy
-POSTHOOK: Output: default@dummy@ds=2008/hr=12
-PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table dummy partition (ds='2008',hr='11')
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@dummy
-POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table dummy partition (ds='2008',hr='11')
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@dummy
-POSTHOOK: Output: default@dummy@ds=2008/hr=11
-PREHOOK: query: analyze table dummy partition (ds,hr) compute statistics
-PREHOOK: type: QUERY
-PREHOOK: Input: default@dummy
-PREHOOK: Input: default@dummy@ds=2008/hr=11
-PREHOOK: Input: default@dummy@ds=2008/hr=12
-PREHOOK: Output: default@dummy
-PREHOOK: Output: default@dummy@ds=2008/hr=11
-PREHOOK: Output: default@dummy@ds=2008/hr=12
-POSTHOOK: query: analyze table dummy partition (ds,hr) compute statistics
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@dummy
-POSTHOOK: Input: default@dummy@ds=2008/hr=11
-POSTHOOK: Input: default@dummy@ds=2008/hr=12
-POSTHOOK: Output: default@dummy
-POSTHOOK: Output: default@dummy@ds=2008/hr=11
-POSTHOOK: Output: default@dummy@ds=2008/hr=12
-PREHOOK: query: describe formatted dummy partition (ds='2008', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@dummy
-POSTHOOK: query: describe formatted dummy partition (ds='2008', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@dummy
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008, 11]          	 
-Database:           	default             	 
-Table:              	dummy               	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	500                 
-	rawDataSize         	5312                
-	totalSize           	5812                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: describe formatted dummy partition (ds='2008', hr='12')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@dummy
-POSTHOOK: query: describe formatted dummy partition (ds='2008', hr='12')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@dummy
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008, 12]          	 
-Database:           	default             	 
-Table:              	dummy               	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	500                 
-	rawDataSize         	5312                
-	totalSize           	5812                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: drop table dummy
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@dummy
-PREHOOK: Output: default@dummy
-POSTHOOK: query: drop table dummy
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@dummy
-POSTHOOK: Output: default@dummy
-PREHOOK: query: -- static partitioned table on insert
-
-create table dummy (key string, value string) partitioned by (ds string, hr string)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@dummy
-POSTHOOK: query: -- static partitioned table on insert
-
-create table dummy (key string, value string) partitioned by (ds string, hr string)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@dummy
-PREHOOK: query: insert overwrite table dummy partition (ds='10',hr='11') select * from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@dummy@ds=10/hr=11
-POSTHOOK: query: insert overwrite table dummy partition (ds='10',hr='11') select * from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@dummy@ds=10/hr=11
-POSTHOOK: Lineage: dummy PARTITION(ds=10,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: dummy PARTITION(ds=10,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: insert overwrite table dummy partition (ds='10',hr='12') select * from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@dummy@ds=10/hr=12
-POSTHOOK: query: insert overwrite table dummy partition (ds='10',hr='12') select * from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@dummy@ds=10/hr=12
-POSTHOOK: Lineage: dummy PARTITION(ds=10,hr=12).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: dummy PARTITION(ds=10,hr=12).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: describe formatted dummy partition (ds='10', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@dummy
-POSTHOOK: query: describe formatted dummy partition (ds='10', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@dummy
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[10, 11]            	 
-Database:           	default             	 
-Table:              	dummy               	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	500                 
-	rawDataSize         	5312                
-	totalSize           	5812                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: describe formatted dummy partition (ds='10', hr='12')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@dummy
-POSTHOOK: query: describe formatted dummy partition (ds='10', hr='12')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@dummy
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[10, 12]            	 
-Database:           	default             	 
-Table:              	dummy               	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	500                 
-	rawDataSize         	5312                
-	totalSize           	5812                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: drop table dummy
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@dummy
-PREHOOK: Output: default@dummy
-POSTHOOK: query: drop table dummy
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@dummy
-POSTHOOK: Output: default@dummy
-PREHOOK: query: -- dynamic partitioned table on insert
-
-create table dummy (key int) partitioned by (hr int)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@dummy
-POSTHOOK: query: -- dynamic partitioned table on insert
-
-create table dummy (key int) partitioned by (hr int)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@dummy
-PREHOOK: query: CREATE TABLE tbl(key int, value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@tbl
-POSTHOOK: query: CREATE TABLE tbl(key int, value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@tbl
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/tbl.txt' OVERWRITE INTO TABLE tbl
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@tbl
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/tbl.txt' OVERWRITE INTO TABLE tbl
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@tbl
-PREHOOK: query: insert overwrite table dummy partition (hr) select * from tbl
-PREHOOK: type: QUERY
-PREHOOK: Input: default@tbl
-PREHOOK: Output: default@dummy
-POSTHOOK: query: insert overwrite table dummy partition (hr) select * from tbl
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@tbl
-POSTHOOK: Output: default@dummy@hr=1994
-POSTHOOK: Output: default@dummy@hr=1996
-POSTHOOK: Output: default@dummy@hr=1997
-POSTHOOK: Output: default@dummy@hr=1998
-POSTHOOK: Lineage: dummy PARTITION(hr=1994).key SIMPLE [(tbl)tbl.FieldSchema(name:key, type:int, comment:null), ]
-POSTHOOK: Lineage: dummy PARTITION(hr=1996).key SIMPLE [(tbl)tbl.FieldSchema(name:key, type:int, comment:null), ]
-POSTHOOK: Lineage: dummy PARTITION(hr=1997).key SIMPLE [(tbl)tbl.FieldSchema(name:key, type:int, comment:null), ]
-POSTHOOK: Lineage: dummy PARTITION(hr=1998).key SIMPLE [(tbl)tbl.FieldSchema(name:key, type:int, comment:null), ]
-PREHOOK: query: describe formatted dummy partition (hr=1997)
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@dummy
-POSTHOOK: query: describe formatted dummy partition (hr=1997)
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@dummy
-# col_name            	data_type           	comment             
-	 	 
-key                 	int                 	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-hr                  	int                 	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[1997]              	 
-Database:           	default             	 
-Table:              	dummy               	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	6                   
-	rawDataSize         	6                   
-	totalSize           	12                  
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: describe formatted dummy partition (hr=1994)
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@dummy
-POSTHOOK: query: describe formatted dummy partition (hr=1994)
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@dummy
-# col_name            	data_type           	comment             
-	 	 
-key                 	int                 	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-hr                  	int                 	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[1994]              	 
-Database:           	default             	 
-Table:              	dummy               	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	1                   
-	rawDataSize         	1                   
-	totalSize           	2                   
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: describe formatted dummy partition (hr=1998)
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@dummy
-POSTHOOK: query: describe formatted dummy partition (hr=1998)
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@dummy
-# col_name            	data_type           	comment             
-	 	 
-key                 	int                 	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-hr                  	int                 	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[1998]              	 
-Database:           	default             	 
-Table:              	dummy               	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	2                   
-	rawDataSize         	2                   
-	totalSize           	4                   
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: describe formatted dummy partition (hr=1996)
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@dummy
-POSTHOOK: query: describe formatted dummy partition (hr=1996)
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@dummy
-# col_name            	data_type           	comment             
-	 	 
-key                 	int                 	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-hr                  	int                 	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[1996]              	 
-Database:           	default             	 
-Table:              	dummy               	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	1                   
-	rawDataSize         	1                   
-	totalSize           	2                   
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: drop table tbl
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@tbl
-PREHOOK: Output: default@tbl
-POSTHOOK: query: drop table tbl
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@tbl
-POSTHOOK: Output: default@tbl
-PREHOOK: query: drop table dummy
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@dummy
-PREHOOK: Output: default@dummy
-POSTHOOK: query: drop table dummy
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@dummy
-POSTHOOK: Output: default@dummy
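
The golden file deleted above exercises basic partition-level statistics (numFiles, numRows, rawDataSize, totalSize) gathered either by an explicit ANALYZE or by stats autogather on INSERT OVERWRITE, and then read back via DESCRIBE FORMATTED. For reference, a minimal HiveQL sketch of that flow, reconstructed from the queries visible in the deleted output (table names, file paths, and partition values are the test's own; the "set" line is an assumption added for completeness and was not part of the original golden file):

  -- explicit analyze on a partitioned table
  create table dummy (key string, value string) partitioned by (ds string, hr string);
  load data local inpath '../../data/files/kv1.txt' into table dummy partition (ds='2008', hr='12');
  analyze table dummy partition (ds, hr) compute statistics;
  describe formatted dummy partition (ds='2008', hr='12');
  -- Partition Parameters should list numFiles, numRows, rawDataSize, totalSize

  -- stats autogather on insert (static partition spec)
  insert overwrite table dummy partition (ds='10', hr='11') select * from src;
  describe formatted dummy partition (ds='10', hr='11');

  -- stats autogather on insert (dynamic partition spec, as in the last part of the test)
  drop table dummy;
  create table dummy (key int) partitioned by (hr int);
  create table tbl (key int, value int) row format delimited fields terminated by '|';
  load data local inpath '../../data/files/tbl.txt' overwrite into table tbl;
  set hive.exec.dynamic.partition.mode=nonstrict;  -- assumption: typically required for a fully dynamic partition spec
  insert overwrite table dummy partition (hr) select * from tbl;
  describe formatted dummy partition (hr=1997);

This is only a sketch of what the removed test covered; it does not rely on the counter-based stats collector being removed by this commit.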