Posted to commits@hive.apache.org by px...@apache.org on 2016/05/09 17:41:43 UTC

[01/21] hive git commit: HIVE-13341: Stats state is not captured correctly: differentiate load table and create table (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

Repository: hive
Updated Branches:
  refs/heads/master 2a8e38814 -> 244ce09c9
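
Context for the golden-file churn below: every updated .q.out gains the same block of basic-stats table parameters, because after this change a bare CREATE TABLE records accurate basic stats for the empty table it just created, while loading data into a table is handled separately (the loaded partitions below gain numRows/rawDataSize entries but no accuracy flag). A minimal HiveQL sketch of the behavior these updates reflect -- the table name t1 and the file kv1.txt are illustrative, not taken from the patch:

    -- Creating a table now initializes its basic stats, so DESCRIBE FORMATTED
    -- immediately shows COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} together
    -- with numFiles/numRows/rawDataSize/totalSize, all 0 for an empty table.
    CREATE TABLE t1 (key INT, value STRING);
    DESCRIBE FORMATTED t1;

    -- A plain file load does not compute row-level stats, so after it the
    -- table's stats are no longer marked accurate (no COLUMN_STATS_ACCURATE).
    LOAD DATA LOCAL INPATH 'kv1.txt' OVERWRITE INTO TABLE t1;
    DESCRIBE FORMATTED t1;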


http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/unicode_notation.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/unicode_notation.q.out b/ql/src/test/results/clientpositive/unicode_notation.q.out
index 52da674..37848b0 100644
--- a/ql/src/test/results/clientpositive/unicode_notation.q.out
+++ b/ql/src/test/results/clientpositive/unicode_notation.q.out
@@ -27,6 +27,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -73,6 +78,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -119,6 +129,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/unset_table_view_property.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/unset_table_view_property.q.out b/ql/src/test/results/clientpositive/unset_table_view_property.q.out
index 8bbb9fe..a3dec73 100644
--- a/ql/src/test/results/clientpositive/unset_table_view_property.q.out
+++ b/ql/src/test/results/clientpositive/unset_table_view_property.q.out
@@ -16,6 +16,11 @@ PREHOOK: query: SHOW TBLPROPERTIES vt.testTable
 PREHOOK: type: SHOW_TBLPROPERTIES
 POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
 POSTHOOK: type: SHOW_TBLPROPERTIES
+COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true"}
+numFiles	0
+numRows	0
+rawDataSize	0
+totalSize	0
 #### A masked pattern was here ####
 PREHOOK: query: -- UNSET TABLE PROPERTIES
 ALTER TABLE vt.testTable SET TBLPROPERTIES ('a'='1', 'c'='3')
@@ -35,6 +40,8 @@ a	1
 c	3
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 totalSize	0
 #### A masked pattern was here ####
 PREHOOK: query: -- UNSET all the properties
@@ -53,6 +60,8 @@ POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
 POSTHOOK: type: SHOW_TBLPROPERTIES
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 totalSize	0
 #### A masked pattern was here ####
 PREHOOK: query: ALTER TABLE vt.testTable SET TBLPROPERTIES ('a'='1', 'c'='3', 'd'='4')
@@ -72,6 +81,8 @@ c	3
 d	4
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 totalSize	0
 #### A masked pattern was here ####
 PREHOOK: query: -- UNSET a subset of the properties
@@ -91,6 +102,8 @@ POSTHOOK: type: SHOW_TBLPROPERTIES
 c	3
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 totalSize	0
 #### A masked pattern was here ####
 PREHOOK: query: -- the same property being UNSET multiple times
@@ -109,6 +122,8 @@ POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
 POSTHOOK: type: SHOW_TBLPROPERTIES
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 totalSize	0
 #### A masked pattern was here ####
 PREHOOK: query: ALTER TABLE vt.testTable SET TBLPROPERTIES ('a'='1', 'b' = '2', 'c'='3', 'd'='4')
@@ -129,6 +144,8 @@ c	3
 d	4
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 totalSize	0
 #### A masked pattern was here ####
 PREHOOK: query: -- UNSET a subset of the properties and some non-existed properties using IF EXISTS
@@ -149,6 +166,8 @@ a	1
 c	3
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 totalSize	0
 #### A masked pattern was here ####
 PREHOOK: query: -- UNSET a subset of the properties and some non-existed properties using IF EXISTS
@@ -168,6 +187,8 @@ POSTHOOK: type: SHOW_TBLPROPERTIES
 a	1
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 totalSize	0
 #### A masked pattern was here ####
 PREHOOK: query: DROP TABLE vt.testTable

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/vectorized_ptf.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vectorized_ptf.q.out b/ql/src/test/results/clientpositive/vectorized_ptf.q.out
index fc4351d..7fb966a 100644
--- a/ql/src/test/results/clientpositive/vectorized_ptf.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_ptf.q.out
@@ -5354,15 +5354,20 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                       bucket_count -1
                       columns p_mfgr,p_name,p_size,r,dr,s
                       columns.comments 
                       columns.types string:string:int:int:int:double
 #### A masked pattern was here ####
                       name default.part_4
+                      numFiles 0
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct part_4 { string p_mfgr, string p_name, i32 p_size, i32 r, i32 dr, double s}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 0
 #### A masked pattern was here ####
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.part_4
@@ -5379,15 +5384,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns p_mfgr,p_name,p_size,r,dr,s
                 columns.comments 
                 columns.types string:string:int:int:int:double
 #### A masked pattern was here ####
                 name default.part_4
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct part_4 { string p_mfgr, string p_name, i32 p_size, i32 r, i32 dr, double s}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.part_4
@@ -5584,15 +5594,20 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                       bucket_count -1
                       columns p_mfgr,p_name,p_size,s2,r,dr,cud,fv1
                       columns.comments 
                       columns.types string:string:int:int:int:int:double:int
 #### A masked pattern was here ####
                       name default.part_5
+                      numFiles 0
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct part_5 { string p_mfgr, string p_name, i32 p_size, i32 s2, i32 r, i32 dr, double cud, i32 fv1}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 0
 #### A masked pattern was here ####
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.part_5
@@ -5609,15 +5624,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns p_mfgr,p_name,p_size,s2,r,dr,cud,fv1
                 columns.comments 
                 columns.types string:string:int:int:int:int:double:int
 #### A masked pattern was here ####
                 name default.part_5
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct part_5 { string p_mfgr, string p_name, i32 p_size, i32 s2, i32 r, i32 dr, double cud, i32 fv1}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.part_5


[04/21] hive git commit: HIVE-13341: Stats state is not captured correctly: differentiate load table and create table (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_2.q.out b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_2.q.out
index c283738..5d93fdc 100644
--- a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_2.q.out
+++ b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_2.q.out
@@ -159,8 +159,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -204,8 +206,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -277,8 +281,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -438,8 +444,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -483,8 +491,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -556,8 +566,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_3.q.out b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_3.q.out
index 351fd20..15e9ef8 100644
--- a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_3.q.out
+++ b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_3.q.out
@@ -143,8 +143,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -188,8 +190,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -277,8 +281,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -436,8 +442,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -508,8 +516,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -553,8 +563,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -713,8 +725,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -785,8 +799,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -830,8 +846,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_4.q.out b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_4.q.out
index 2d9cdf8..f43cad2 100644
--- a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_4.q.out
+++ b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_4.q.out
@@ -159,8 +159,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -204,8 +206,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -293,8 +297,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -452,8 +458,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -524,8 +532,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -569,8 +579,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -729,8 +741,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -801,8 +815,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -846,8 +862,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_5.q.out b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_5.q.out
index 8bc203a..3f019bc 100644
--- a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_5.q.out
+++ b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_5.q.out
@@ -119,6 +119,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -138,6 +140,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.bucket_small
                       numFiles 4
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct bucket_small { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -198,6 +202,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -217,6 +223,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.bucket_big
                       numFiles 2
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct bucket_big { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -322,6 +330,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -341,6 +351,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.bucket_small
                       numFiles 4
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct bucket_small { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -401,6 +413,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -420,6 +434,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.bucket_big
                       numFiles 2
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct bucket_big { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -551,6 +567,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -570,6 +588,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.bucket_big
                       numFiles 2
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct bucket_big { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -620,6 +640,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -639,6 +661,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.bucket_small
                       numFiles 4
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct bucket_small { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_7.q.out b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_7.q.out
index 18fc95c..cb17d38 100644
--- a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_7.q.out
+++ b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_7.q.out
@@ -176,8 +176,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -221,8 +223,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -310,8 +314,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -355,8 +361,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -517,8 +525,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -562,8 +572,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -635,8 +647,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -680,8 +694,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -842,8 +858,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -887,8 +905,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -960,8 +980,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1005,8 +1027,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_8.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_8.q.out b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_8.q.out
index 9a1010c..d24b3b2 100644
--- a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_8.q.out
+++ b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_8.q.out
@@ -176,8 +176,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -221,8 +223,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -310,8 +314,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -355,8 +361,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -517,8 +525,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -562,8 +572,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -635,8 +647,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -680,8 +694,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -844,8 +860,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -889,8 +907,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -962,8 +982,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1007,8 +1029,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/bucket2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/bucket2.q.out b/ql/src/test/results/clientpositive/tez/bucket2.q.out
index e9432a8..800edf3 100644
--- a/ql/src/test/results/clientpositive/tez/bucket2.q.out
+++ b/ql/src/test/results/clientpositive/tez/bucket2.q.out
@@ -117,6 +117,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         bucket_count 2
                         bucket_field_name key
                         columns key,value
@@ -124,9 +125,13 @@ STAGE PLANS:
                         columns.types int:string
 #### A masked pattern was here ####
                         name default.bucket2_1
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct bucket2_1 { i32 key, string value}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.bucket2_1
@@ -146,6 +151,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count 2
                 bucket_field_name key
                 columns key,value
@@ -153,9 +159,13 @@ STAGE PLANS:
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.bucket2_1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket2_1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucket2_1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/bucket4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/bucket4.q.out b/ql/src/test/results/clientpositive/tez/bucket4.q.out
index a0f1177..4291e44 100644
--- a/ql/src/test/results/clientpositive/tez/bucket4.q.out
+++ b/ql/src/test/results/clientpositive/tez/bucket4.q.out
@@ -114,6 +114,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         SORTBUCKETCOLSPREFIX TRUE
                         bucket_count 2
                         bucket_field_name key
@@ -122,9 +123,13 @@ STAGE PLANS:
                         columns.types int:string
 #### A masked pattern was here ####
                         name default.bucket4_1
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct bucket4_1 { i32 key, string value}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.bucket4_1
@@ -144,6 +149,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 SORTBUCKETCOLSPREFIX TRUE
                 bucket_count 2
                 bucket_field_name key
@@ -152,9 +158,13 @@ STAGE PLANS:
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.bucket4_1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket4_1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucket4_1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/column_names_with_leading_and_trailing_spaces.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/column_names_with_leading_and_trailing_spaces.q.out b/ql/src/test/results/clientpositive/tez/column_names_with_leading_and_trailing_spaces.q.out
index 46c285e..18314b5 100644
--- a/ql/src/test/results/clientpositive/tez/column_names_with_leading_and_trailing_spaces.q.out
+++ b/ql/src/test/results/clientpositive/tez/column_names_with_leading_and_trailing_spaces.q.out
@@ -25,6 +25,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/disable_merge_for_bucketing.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/disable_merge_for_bucketing.q.out b/ql/src/test/results/clientpositive/tez/disable_merge_for_bucketing.q.out
index bb62e1f..fb71214 100644
--- a/ql/src/test/results/clientpositive/tez/disable_merge_for_bucketing.q.out
+++ b/ql/src/test/results/clientpositive/tez/disable_merge_for_bucketing.q.out
@@ -113,6 +113,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         bucket_count 2
                         bucket_field_name key
                         columns key,value
@@ -120,9 +121,13 @@ STAGE PLANS:
                         columns.types int:string
 #### A masked pattern was here ####
                         name default.bucket2_1
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct bucket2_1 { i32 key, string value}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.bucket2_1
@@ -142,6 +147,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count 2
                 bucket_field_name key
                 columns key,value
@@ -149,9 +155,13 @@ STAGE PLANS:
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.bucket2_1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket2_1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucket2_1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/explainuser_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/explainuser_1.q.out b/ql/src/test/results/clientpositive/tez/explainuser_1.q.out
index bdb8830..965577e 100644
--- a/ql/src/test/results/clientpositive/tez/explainuser_1.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainuser_1.q.out
@@ -186,9 +186,9 @@ Stage-0
           SHUFFLE [RS_5]
             Group By Operator [GBY_4] (rows=1 width=8)
               Output:["_col0"],aggregations:["count(1)"]
-              Select Operator [SEL_2] (rows=1 width=2515)
-                TableScan [TS_0] (rows=1 width=2515)
-                  default@src_orc_merge_test_part,src_orc_merge_test_part,Tbl:PARTIAL,Col:NONE
+              Select Operator [SEL_2] (rows=500 width=94)
+                TableScan [TS_0] (rows=500 width=94)
+                  default@src_orc_merge_test_part,src_orc_merge_test_part,Tbl:COMPLETE,Col:NONE
 
 PREHOOK: query: explain select sum(hash(key)), sum(hash(value)) from src_orc_merge_test_part where ds='2012-01-03' and ts='2012-01-03+14:46:31'
 PREHOOK: type: QUERY
@@ -211,9 +211,9 @@ Stage-0
           SHUFFLE [RS_5]
             Group By Operator [GBY_4] (rows=1 width=16)
               Output:["_col0","_col1"],aggregations:["sum(_col0)","sum(_col1)"]
-              Select Operator [SEL_2] (rows=24 width=104)
+              Select Operator [SEL_2] (rows=500 width=94)
                 Output:["_col0","_col1"]
-                TableScan [TS_0] (rows=24 width=104)
+                TableScan [TS_0] (rows=500 width=94)
                   default@src_orc_merge_test_part,src_orc_merge_test_part,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
 
 PREHOOK: query: drop table src_orc_merge_test_part
@@ -3238,8 +3238,8 @@ Stage-0
           Output:["_col0"],aggregations:["count(1)"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_3]
-            Select Operator [SEL_1] (rows=1 width=171)
-              TableScan [TS_0] (rows=1 width=171)
+            Select Operator [SEL_1] (rows=5 width=6)
+              TableScan [TS_0] (rows=5 width=6)
                 default@tgt_rc_merge_test,tgt_rc_merge_test,Tbl:COMPLETE,Col:COMPLETE
 
 PREHOOK: query: explain select sum(hash(key)), sum(hash(value)) from tgt_rc_merge_test
@@ -3261,9 +3261,9 @@ Stage-0
           Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_3]
-            Select Operator [SEL_1] (rows=1 width=171)
+            Select Operator [SEL_1] (rows=5 width=6)
               Output:["_col0","_col1"]
-              TableScan [TS_0] (rows=1 width=171)
+              TableScan [TS_0] (rows=5 width=6)
                 default@tgt_rc_merge_test,tgt_rc_merge_test,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
 
 PREHOOK: query: drop table src_rc_merge_test

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/metadataonly1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/metadataonly1.q.out b/ql/src/test/results/clientpositive/tez/metadataonly1.q.out
index e43a7e4..15f5ed5 100644
--- a/ql/src/test/results/clientpositive/tez/metadataonly1.q.out
+++ b/ql/src/test/results/clientpositive/tez/metadataonly1.q.out
@@ -145,17 +145,22 @@ STAGE PLANS:
                   partition values:
                     ds 1
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test1
+                    numFiles 0
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct test1 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.NullStructSerDe
                 
@@ -276,17 +281,22 @@ STAGE PLANS:
                   partition values:
                     ds 1
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test1
+                    numFiles 0
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct test1 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.NullStructSerDe
                 
@@ -407,17 +417,22 @@ STAGE PLANS:
                   partition values:
                     ds 1
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test1
+                    numFiles 0
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct test1 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 
@@ -545,17 +560,22 @@ STAGE PLANS:
                   partition values:
                     ds 1
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test1
+                    numFiles 0
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct test1 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 
@@ -585,17 +605,22 @@ STAGE PLANS:
                   partition values:
                     ds 2
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test1
+                    numFiles 0
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct test1 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 
@@ -653,17 +678,22 @@ STAGE PLANS:
                   partition values:
                     ds 1
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test1
+                    numFiles 0
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct test1 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 
@@ -693,17 +723,22 @@ STAGE PLANS:
                   partition values:
                     ds 2
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test1
+                    numFiles 0
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct test1 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 
@@ -918,17 +953,22 @@ STAGE PLANS:
                     ds 1
                     hr 1
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test2
+                    numFiles 0
+                    numRows 0
                     partition_columns ds/hr
                     partition_columns.types string:string
+                    rawDataSize 0
                     serialization.ddl struct test2 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.NullStructSerDe
                 
@@ -958,17 +998,22 @@ STAGE PLANS:
                     ds 1
                     hr 2
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test2
+                    numFiles 0
+                    numRows 0
                     partition_columns ds/hr
                     partition_columns.types string:string
+                    rawDataSize 0
                     serialization.ddl struct test2 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.NullStructSerDe
                 
@@ -998,17 +1043,22 @@ STAGE PLANS:
                     ds 1
                     hr 3
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test2
+                    numFiles 0
+                    numRows 0
                     partition_columns ds/hr
                     partition_columns.types string:string
+                    rawDataSize 0
                     serialization.ddl struct test2 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.NullStructSerDe
                 
@@ -1140,17 +1190,22 @@ STAGE PLANS:
                     ds 1
                     hr 1
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test2
+                    numFiles 0
+                    numRows 0
                     partition_columns ds/hr
                     partition_columns.types string:string
+                    rawDataSize 0
                     serialization.ddl struct test2 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 
@@ -1181,17 +1236,22 @@ STAGE PLANS:
                     ds 1
                     hr 2
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test2
+                    numFiles 0
+                    numRows 0
                     partition_columns ds/hr
                     partition_columns.types string:string
+                    rawDataSize 0
                     serialization.ddl struct test2 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 
@@ -1222,17 +1282,22 @@ STAGE PLANS:
                     ds 1
                     hr 3
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test2
+                    numFiles 0
+                    numRows 0
                     partition_columns ds/hr
                     partition_columns.types string:string
+                    rawDataSize 0
                     serialization.ddl struct test2 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 
@@ -1359,17 +1424,22 @@ STAGE PLANS:
                   partition values:
                     ds 1
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test1
+                    numFiles 0
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct test1 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.NullStructSerDe
                 
@@ -1398,17 +1468,22 @@ STAGE PLANS:
                   partition values:
                     ds 2
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test1
+                    numFiles 0
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct test1 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.NullStructSerDe
                 
@@ -1589,17 +1664,22 @@ STAGE PLANS:
                     ds 01:10:10
                     hr 01
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test2
+                    numFiles 0
+                    numRows 0
                     partition_columns ds/hr
                     partition_columns.types string:string
+                    rawDataSize 0
                     serialization.ddl struct test2 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.NullStructSerDe
                 
@@ -1629,17 +1709,22 @@ STAGE PLANS:
                     ds 01:10:20
                     hr 02
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test2
+                    numFiles 0
+                    numRows 0
                     partition_columns ds/hr
                     partition_columns.types string:string
+                    rawDataSize 0
                     serialization.ddl struct test2 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.NullStructSerDe
                 
@@ -1669,17 +1754,22 @@ STAGE PLANS:
                     ds 1
                     hr 1
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test2
+                    numFiles 0
+                    numRows 0
                     partition_columns ds/hr
                     partition_columns.types string:string
+                    rawDataSize 0
                     serialization.ddl struct test2 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.NullStructSerDe
                 
@@ -1709,17 +1799,22 @@ STAGE PLANS:
                     ds 1
                     hr 2
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test2
+                    numFiles 0
+                    numRows 0
                     partition_columns ds/hr
                     partition_columns.types string:string
+                    rawDataSize 0
                     serialization.ddl struct test2 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.NullStructSerDe
                 
@@ -1749,17 +1844,22 @@ STAGE PLANS:
                     ds 1
                     hr 3
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns a,b
                     columns.comments 
                     columns.types int:double
 #### A masked pattern was here ####
                     name default.test2
+                    numFiles 0
+                    numRows 0
                     partition_columns ds/hr
                     partition_columns.types string:string
+                    rawDataSize 0
                     serialization.ddl struct test2 { i32 a, double b}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.NullStructSerDe
                 

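[Editor's note, not part of the commit: the empty partitions in these plans now carry COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} with all-zero counters -- a partition created purely through DDL holds exactly zero rows, so zero is an accurate stat rather than a missing one. A minimal sketch, with DDL reconstructed from the serialization.ddl lines above; the q-file may set the table up slightly differently:]

CREATE TABLE test1 (a INT, b DOUBLE) PARTITIONED BY (ds STRING);
-- ADD PARTITION writes no data, so its basic stats are accurately zero:
ALTER TABLE test1 ADD PARTITION (ds='1');
-- The partition parameters should list numFiles 0, numRows 0, totalSize 0:
DESCRIBE FORMATTED test1 PARTITION (ds='1');
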
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/sample1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/sample1.q.out b/ql/src/test/results/clientpositive/tez/sample1.q.out
index 009969e..2120a1ff 100644
--- a/ql/src/test/results/clientpositive/tez/sample1.q.out
+++ b/ql/src/test/results/clientpositive/tez/sample1.q.out
@@ -54,15 +54,20 @@ STAGE PLANS:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                             properties:
+                              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                               bucket_count -1
                               columns key,value,dt,hr
                               columns.comments 
                               columns.types int:string:string:string
 #### A masked pattern was here ####
                               name default.dest1
+                              numFiles 0
+                              numRows 0
+                              rawDataSize 0
                               serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
                               serialization.format 1
                               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              totalSize 0
 #### A masked pattern was here ####
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             name: default.dest1
@@ -133,15 +138,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,dt,hr
                 columns.comments 
                 columns.types int:string:string:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1


http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketmapjoin5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin5.q.out b/ql/src/test/results/clientpositive/bucketmapjoin5.q.out
index 1bcb382..9494681 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin5.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin5.q.out
@@ -251,15 +251,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,value1,value2
                           columns.comments 
                           columns.types string:string:string
 #### A masked pattern was here ####
                           name default.bucketmapjoin_tmp_result
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.bucketmapjoin_tmp_result
@@ -287,8 +292,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -331,8 +338,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -381,15 +390,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -412,15 +426,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value1,value2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.bucketmapjoin_tmp_result
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.bucketmapjoin_tmp_result
@@ -436,30 +455,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value1,value2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.bucketmapjoin_tmp_result
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -481,15 +510,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value1,value2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.bucketmapjoin_tmp_result
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.bucketmapjoin_tmp_result
@@ -505,30 +539,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value1,value2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.bucketmapjoin_tmp_result
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -777,8 +821,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_2
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -821,8 +867,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_2
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
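
[Editor's note, not part of the commit: note the asymmetry above -- the srcbucket_mapjoin_part partitions keep their real numFiles/totalSize from the load but gain only numRows 0 / rawDataSize 0, with no COLUMN_STATS_ACCURATE, while the created-but-empty bucketmapjoin_tmp_result gets fully accurate zero stats. A rough sketch of that load-vs-create distinction; table name, path and data are illustrative, not from the q-file:]

CREATE TABLE load_demo (key INT, value STRING) PARTITIONED BY (ds STRING);
-- LOAD DATA only moves files: numFiles/totalSize are updated, but row
-- counts are unknown, so numRows stays 0 and stats are not marked accurate.
LOAD DATA LOCAL INPATH '/tmp/kv1.txt'
  OVERWRITE INTO TABLE load_demo PARTITION (ds='1');
-- An INSERT runs a job that counts rows as it writes, so its stats are real:
INSERT OVERWRITE TABLE load_demo PARTITION (ds='2')
  SELECT key, value FROM load_demo WHERE ds='1';
-- Compare the two partitions' parameters:
DESCRIBE FORMATTED load_demo PARTITION (ds='1');
DESCRIBE FORMATTED load_demo PARTITION (ds='2');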

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketmapjoin7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin7.q.out b/ql/src/test/results/clientpositive/bucketmapjoin7.q.out
index 76b13f3..6597fb5 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin7.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin7.q.out
@@ -96,8 +96,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds/hr
                     partition_columns.types string:string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -203,8 +205,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_1
               numFiles 2
+              numRows 0
               partition_columns ds/hr
               partition_columns.types string:string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketmapjoin8.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin8.q.out b/ql/src/test/results/clientpositive/bucketmapjoin8.q.out
index 097eaad..5ce0ae3 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin8.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin8.q.out
@@ -101,8 +101,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -205,8 +207,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_1
               numFiles 2
+              numRows 0
               partition_columns part
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -339,8 +343,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -443,8 +449,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_1
               numFiles 2
+              numRows 0
               partition_columns part
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketmapjoin9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin9.q.out b/ql/src/test/results/clientpositive/bucketmapjoin9.q.out
index 144d86d..9c4a0ca 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin9.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin9.q.out
@@ -109,8 +109,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 3
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -205,8 +207,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_1
               numFiles 2
+              numRows 0
               partition_columns part
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -372,8 +376,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -468,8 +474,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_1
               numFiles 2
+              numRows 0
               partition_columns part
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out b/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out
index d108575..3b6c895 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out
@@ -108,8 +108,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 3
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -187,15 +189,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,value1,value2
                           columns.comments 
                           columns.types string:string:string
 #### A masked pattern was here ####
                           name default.bucketmapjoin_tmp_result
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.bucketmapjoin_tmp_result
@@ -221,6 +228,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin
               numFiles 2
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -239,6 +248,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.srcbucket_mapjoin
                 numFiles 2
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -268,15 +279,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -299,15 +315,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value1,value2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.bucketmapjoin_tmp_result
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.bucketmapjoin_tmp_result
@@ -323,30 +344,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value1,value2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.bucketmapjoin_tmp_result
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -368,15 +399,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value1,value2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.bucketmapjoin_tmp_result
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.bucketmapjoin_tmp_result
@@ -392,30 +428,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value1,value2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.bucketmapjoin_tmp_result
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out b/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out
index 9c6a226..4c554bf 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out
@@ -117,8 +117,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -159,8 +161,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -246,15 +250,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,value1,value2
                           columns.comments 
                           columns.types string:string:string
 #### A masked pattern was here ####
                           name default.bucketmapjoin_tmp_result
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.bucketmapjoin_tmp_result
@@ -280,6 +289,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin
               numFiles 2
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -298,6 +309,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.srcbucket_mapjoin
                 numFiles 2
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -327,15 +340,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -358,15 +376,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value1,value2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.bucketmapjoin_tmp_result
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.bucketmapjoin_tmp_result
@@ -382,30 +405,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value1,value2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.bucketmapjoin_tmp_result
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -427,15 +460,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value1,value2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.bucketmapjoin_tmp_result
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.bucketmapjoin_tmp_result
@@ -451,30 +489,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value1,value2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.bucketmapjoin_tmp_result
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out b/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out
index 3078854..ed107a3 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out
@@ -250,6 +250,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.test1
               numFiles 3
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct test1 { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -269,6 +271,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.test1
                 numFiles 3
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct test1 { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -392,6 +396,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.test2
               numFiles 3
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct test2 { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -411,6 +417,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.test2
                 numFiles 3
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct test2 { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -528,6 +536,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.test1
               numFiles 3
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct test1 { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -547,6 +557,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.test1
                 numFiles 3
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct test1 { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -662,6 +674,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.test1
               numFiles 3
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct test1 { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -681,6 +695,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.test1
                 numFiles 3
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct test1 { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -796,6 +812,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.test1
               numFiles 3
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct test1 { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -815,6 +833,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.test1
                 numFiles 3
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct test1 { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -930,6 +950,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.test1
               numFiles 3
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct test1 { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -949,6 +971,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.test1
                 numFiles 3
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct test1 { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1064,6 +1088,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.test2
               numFiles 3
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct test2 { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1083,6 +1109,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.test2
                 numFiles 3
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct test2 { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1198,6 +1226,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.test2
               numFiles 3
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct test2 { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1217,6 +1247,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.test2
                 numFiles 3
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct test2 { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1332,6 +1364,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.test3
               numFiles 3
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct test3 { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1351,6 +1385,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.test3
                 numFiles 3
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct test3 { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_1.q.out b/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_1.q.out
index 8c9664d..d812193 100644
--- a/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_1.q.out
+++ b/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_1.q.out
@@ -909,6 +909,8 @@ Retention:          	0
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
 	numFiles            	1                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	5812                
 #### A masked pattern was here ####
 	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out b/ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out
index 46c285e..18314b5 100644
--- a/ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out
+++ b/ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out
@@ -25,6 +25,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
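
The describe-output hunks above reflect the create-table side of this patch: a managed table that has been created but never written to now records COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} together with zeroed basic stats, since an empty table's statistics are exactly known. A minimal sketch of how this surfaces (table name illustrative, not part of the patch):

    CREATE TABLE stats_demo (key STRING, value STRING);
    DESCRIBE FORMATTED stats_demo;
    -- Table Parameters now include:
    --   COLUMN_STATS_ACCURATE  {"BASIC_STATS":"true"}
    --   numFiles 0, numRows 0, rawDataSize 0, totalSize 0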

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/columnstats_partlvl.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/columnstats_partlvl.q.out b/ql/src/test/results/clientpositive/columnstats_partlvl.q.out
index 4edf39a..8587ed3 100644
--- a/ql/src/test/results/clientpositive/columnstats_partlvl.q.out
+++ b/ql/src/test/results/clientpositive/columnstats_partlvl.q.out
@@ -134,8 +134,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.employee_part
               numFiles 1
+              numRows 0
               partition_columns employeesalary
               partition_columns.types double
+              rawDataSize 0
               serialization.ddl struct employee_part { i32 employeeid, string employeename}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -318,8 +320,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.employee_part
               numFiles 1
+              numRows 0
               partition_columns employeesalary
               partition_columns.types double
+              rawDataSize 0
               serialization.ddl struct employee_part { i32 employeeid, string employeename}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/columnstats_tbllvl.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/columnstats_tbllvl.q.out b/ql/src/test/results/clientpositive/columnstats_tbllvl.q.out
index 669807d..8d280c1 100644
--- a/ql/src/test/results/clientpositive/columnstats_tbllvl.q.out
+++ b/ql/src/test/results/clientpositive/columnstats_tbllvl.q.out
@@ -130,6 +130,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.uservisits_web_text_none
               numFiles 1
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct uservisits_web_text_none { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -148,6 +150,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.uservisits_web_text_none
                 numFiles 1
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct uservisits_web_text_none { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -518,6 +522,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name dummydb.uservisits_in_dummy_db
               numFiles 1
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct uservisits_in_dummy_db { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -536,6 +542,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name dummydb.uservisits_in_dummy_db
                 numFiles 1
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct uservisits_in_dummy_db { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
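
The columnstats hunks above, by contrast, cover the load-table side: tables that already hold loaded files (numFiles 1) gain only numRows 0 and rawDataSize 0 entries, with no COLUMN_STATS_ACCURATE marker, because a plain file load leaves the true row count unknown. A sketch of the distinction (file path and table name illustrative):

    CREATE TABLE stats_demo (key STRING, value STRING)
        ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t';
    LOAD DATA LOCAL INPATH '/tmp/kv1.txt' INTO TABLE stats_demo;
    DESCRIBE FORMATTED stats_demo;
    -- Table Parameters now include numFiles 1, numRows 0, rawDataSize 0,
    -- and totalSize, but no COLUMN_STATS_ACCURATE: basic stats are not trusted.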

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out b/ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out
index c4f51a3..f68bcb1 100644
--- a/ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out
+++ b/ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out
@@ -35,6 +35,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -81,6 +86,8 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -127,6 +134,8 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -180,6 +189,8 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -226,6 +237,8 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -271,6 +284,8 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/create_like.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_like.q.out b/ql/src/test/results/clientpositive/create_like.q.out
index 9241b68..8666e02 100644
--- a/ql/src/test/results/clientpositive/create_like.q.out
+++ b/ql/src/test/results/clientpositive/create_like.q.out
@@ -24,6 +24,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -344,6 +349,8 @@ Table Parameters:
 	k2                  	v2                  
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -470,6 +477,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/create_like_view.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_like_view.q.out b/ql/src/test/results/clientpositive/create_like_view.q.out
index e2dc2c4..45fa4ef 100644
--- a/ql/src/test/results/clientpositive/create_like_view.q.out
+++ b/ql/src/test/results/clientpositive/create_like_view.q.out
@@ -52,6 +52,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/create_skewed_table1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_skewed_table1.q.out b/ql/src/test/results/clientpositive/create_skewed_table1.q.out
index 415bb77..fe5ea0f 100644
--- a/ql/src/test/results/clientpositive/create_skewed_table1.q.out
+++ b/ql/src/test/results/clientpositive/create_skewed_table1.q.out
@@ -40,6 +40,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -72,6 +77,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -105,6 +115,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/database_location.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/database_location.q.out b/ql/src/test/results/clientpositive/database_location.q.out
index 797177d..926db3a 100644
--- a/ql/src/test/results/clientpositive/database_location.q.out
+++ b/ql/src/test/results/clientpositive/database_location.q.out
@@ -43,6 +43,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -113,6 +118,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/default_file_format.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/default_file_format.q.out b/ql/src/test/results/clientpositive/default_file_format.q.out
index 3d5c20f..4e5f27d 100644
--- a/ql/src/test/results/clientpositive/default_file_format.q.out
+++ b/ql/src/test/results/clientpositive/default_file_format.q.out
@@ -59,6 +59,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -88,6 +93,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -117,6 +127,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/describe_comment_indent.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/describe_comment_indent.q.out b/ql/src/test/results/clientpositive/describe_comment_indent.q.out
index 3e0f45e..5a01de1 100644
--- a/ql/src/test/results/clientpositive/describe_comment_indent.q.out
+++ b/ql/src/test/results/clientpositive/describe_comment_indent.q.out
@@ -60,7 +60,12 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 	comment             	table comment\ntwo lines
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/describe_comment_nonascii.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/describe_comment_nonascii.q.out b/ql/src/test/results/clientpositive/describe_comment_nonascii.q.out
index df0b65e..703fa14 100644
--- a/ql/src/test/results/clientpositive/describe_comment_nonascii.q.out
+++ b/ql/src/test/results/clientpositive/describe_comment_nonascii.q.out
@@ -55,6 +55,8 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/describe_formatted_view_partitioned.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/describe_formatted_view_partitioned.q.out b/ql/src/test/results/clientpositive/describe_formatted_view_partitioned.q.out
index 2c8b0b0..978df22 100644
--- a/ql/src/test/results/clientpositive/describe_formatted_view_partitioned.q.out
+++ b/ql/src/test/results/clientpositive/describe_formatted_view_partitioned.q.out
@@ -56,6 +56,11 @@ Database:           	default
 Table:              	view_partitioned    	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 PREHOOK: query: DROP VIEW view_partitioned
 PREHOOK: type: DROPVIEW
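
The same differentiation applies at partition granularity: the Partition Parameters block above, and the one in describe_syntax.q.out below, pick up COLUMN_STATS_ACCURATE plus zeroed counters for partitions that were added empty. A sketch, assuming the add-partition path mirrors create table (names illustrative):

    CREATE TABLE part_demo (key STRING) PARTITIONED BY (ds STRING);
    ALTER TABLE part_demo ADD PARTITION (ds='2016-05-09');
    DESCRIBE FORMATTED part_demo PARTITION (ds='2016-05-09');
    -- Partition Parameters now include COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
    -- and numFiles/numRows/rawDataSize/totalSize all 0.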

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/describe_syntax.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/describe_syntax.q.out b/ql/src/test/results/clientpositive/describe_syntax.q.out
index d0c75ed..7d012ca 100644
--- a/ql/src/test/results/clientpositive/describe_syntax.q.out
+++ b/ql/src/test/results/clientpositive/describe_syntax.q.out
@@ -317,6 +317,11 @@ Database:           	db1
 Table:              	t1                  	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -388,6 +393,11 @@ Database:           	db1
 Table:              	t1                  	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out b/ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out
index c4dbf17..c9aed0d 100644
--- a/ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out
+++ b/ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out
@@ -105,6 +105,7 @@ STAGE PLANS:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 properties:
+                  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                   bucket_count 2
                   bucket_field_name key
                   columns key,value
@@ -112,9 +113,13 @@ STAGE PLANS:
                   columns.types int:string
 #### A masked pattern was here ####
                   name default.bucket2_1
+                  numFiles 0
+                  numRows 0
+                  rawDataSize 0
                   serialization.ddl struct bucket2_1 { i32 key, string value}
                   serialization.format 1
                   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  totalSize 0
 #### A masked pattern was here ####
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: default.bucket2_1
@@ -131,6 +136,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count 2
                 bucket_field_name key
                 columns key,value
@@ -138,9 +144,13 @@ STAGE PLANS:
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.bucket2_1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket2_1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucket2_1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out b/ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out
index 1028c26..42aeb6f 100644
--- a/ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out
+++ b/ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out
@@ -146,6 +146,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.uservisits_web_text_none
               numFiles 1
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct uservisits_web_text_none { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -164,6 +166,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.uservisits_web_text_none
                 numFiles 1
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct uservisits_web_text_none { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/encrypted/encryption_join_unencrypted_tbl.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/encrypted/encryption_join_unencrypted_tbl.q.out b/ql/src/test/results/clientpositive/encrypted/encryption_join_unencrypted_tbl.q.out
index 02ffbfb..32ade9c 100644
--- a/ql/src/test/results/clientpositive/encrypted/encryption_join_unencrypted_tbl.q.out
+++ b/ql/src/test/results/clientpositive/encrypted/encryption_join_unencrypted_tbl.q.out
@@ -650,6 +650,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.src
               numFiles 1
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct src { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -667,6 +669,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.src
                 numFiles 1
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct src { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/groupby_map_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_map_ppr.q.out b/ql/src/test/results/clientpositive/groupby_map_ppr.q.out
index 3d07b0d..84999f2 100644
--- a/ql/src/test/results/clientpositive/groupby_map_ppr.q.out
+++ b/ql/src/test/results/clientpositive/groupby_map_ppr.q.out
@@ -177,15 +177,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,c1,c2
                     columns.comments 
                     columns.types string:int:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { string key, i32 c1, string c2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -202,15 +207,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,c1,c2
                 columns.comments 
                 columns.types string:int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { string key, i32 c1, string c2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out b/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out
index 6689225..5cf8bb1 100644
--- a/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out
+++ b/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out
@@ -177,15 +177,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,c1,c2,c3,c4
                     columns.comments 
                     columns.types string:int:string:int:int
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -202,15 +207,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,c1,c2,c3,c4
                 columns.comments 
                 columns.types string:int:string:int:int
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/groupby_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_ppr.q.out b/ql/src/test/results/clientpositive/groupby_ppr.q.out
index 62fffbe..a15b557 100644
--- a/ql/src/test/results/clientpositive/groupby_ppr.q.out
+++ b/ql/src/test/results/clientpositive/groupby_ppr.q.out
@@ -170,15 +170,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,c1,c2
                     columns.comments 
                     columns.types string:int:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { string key, i32 c1, string c2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -195,15 +200,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,c1,c2
                 columns.comments 
                 columns.types string:int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { string key, i32 c1, string c2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out b/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out
index 2408056..117b2cd 100644
--- a/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out
+++ b/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out
@@ -170,15 +170,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,c1,c2,c3,c4
                     columns.comments 
                     columns.types string:int:string:int:int
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -195,15 +200,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,c1,c2,c3,c4
                 columns.comments 
                 columns.types string:int:string:int:int
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1


[12/21] hive git commit: HIVE-13341: Stats state is not captured correctly: differentiate load table and create table (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

Posted by px...@apache.org.
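
Beyond the metadata listings, this part shows the planner-visible effect: with basic stats captured as accurate at write time, the Statistics lines in EXPLAIN output use the real row count (Num rows: 4) instead of an estimate derived from raw file size (Num rows: 10). A sketch of reproducing one such line (query shape inferred from the plan below, so treat it as illustrative):

    EXPLAIN
    SELECT insert_num, a, b
    FROM table_add_int_permute_select
    ORDER BY insert_num;
    -- TableScan ... Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE
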
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/schema_evol_orc_nonvec_mapwork_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/schema_evol_orc_nonvec_mapwork_table.q.out b/ql/src/test/results/clientpositive/schema_evol_orc_nonvec_mapwork_table.q.out
index f007ba8..92f875d 100644
--- a/ql/src/test/results/clientpositive/schema_evol_orc_nonvec_mapwork_table.q.out
+++ b/ql/src/test/results/clientpositive/schema_evol_orc_nonvec_mapwork_table.q.out
@@ -48,6 +48,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -183,24 +188,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_add_int_permute_select
-            Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), a (type: int), b (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -313,6 +318,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -451,24 +461,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_add_int_string_permute_select
-            Statistics: Num rows: 10 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), a (type: int), b (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 10 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 10 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -734,24 +744,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_double
-            Statistics: Num rows: 10 Data size: 2346 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1656 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: double), c2 (type: double), c3 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 2346 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 1656 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 2346 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 1656 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: double), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: double), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 2346 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 1656 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 2346 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1656 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -905,24 +915,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_date_group_string_group_timestamp
-            Statistics: Num rows: 9 Data size: 4770 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: char(50)), c3 (type: char(15)), c4 (type: varchar(50)), c5 (type: varchar(15)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 9 Data size: 4770 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 9 Data size: 4770 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: char(50)), _col3 (type: char(15)), _col4 (type: varchar(50)), _col5 (type: varchar(15)), _col6 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(15)), VALUE._col3 (type: varchar(50)), VALUE._col4 (type: varchar(15)), VALUE._col5 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 9 Data size: 4770 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 9 Data size: 4770 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1092,24 +1102,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_date_group_string_group_date
-            Statistics: Num rows: 9 Data size: 4764 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 1128 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: char(50)), c3 (type: char(15)), c4 (type: varchar(50)), c5 (type: varchar(15)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 9 Data size: 4764 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 1128 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 9 Data size: 4764 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 1128 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: char(50)), _col3 (type: char(15)), _col4 (type: varchar(50)), _col5 (type: varchar(15)), _col6 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(15)), VALUE._col3 (type: varchar(50)), VALUE._col4 (type: varchar(15)), VALUE._col5 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 9 Data size: 4764 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 1128 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 9 Data size: 4764 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 1128 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1259,24 +1269,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_string
-            Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: string), c3 (type: string), c4 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: string), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1419,24 +1429,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_char
-            Statistics: Num rows: 10 Data size: 4222 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(50)), c3 (type: char(50)), c4 (type: char(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 4222 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 4222 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(50)), _col3 (type: char(50)), _col4 (type: char(50)), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(50)), VALUE._col3 (type: char(50)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 4222 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 4222 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1579,24 +1589,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_char_trunc
-            Statistics: Num rows: 10 Data size: 3142 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(5)), c2 (type: char(5)), c3 (type: char(5)), c4 (type: char(5)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 3142 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3142 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(5)), _col2 (type: char(5)), _col3 (type: char(5)), _col4 (type: char(5)), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(5)), VALUE._col1 (type: char(5)), VALUE._col2 (type: char(5)), VALUE._col3 (type: char(5)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 3142 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3142 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1739,24 +1749,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_varchar
-            Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(50)), c2 (type: varchar(50)), c3 (type: varchar(50)), c4 (type: varchar(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(50)), _col2 (type: varchar(50)), _col3 (type: varchar(50)), _col4 (type: varchar(50)), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(50)), VALUE._col1 (type: varchar(50)), VALUE._col2 (type: varchar(50)), VALUE._col3 (type: varchar(50)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1899,24 +1909,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_varchar_trunc
-            Statistics: Num rows: 10 Data size: 3106 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(5)), c2 (type: varchar(5)), c3 (type: varchar(5)), c4 (type: varchar(5)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 3106 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3106 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(5)), _col2 (type: varchar(5)), _col3 (type: varchar(5)), _col4 (type: varchar(5)), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(5)), VALUE._col1 (type: varchar(5)), VALUE._col2 (type: varchar(5)), VALUE._col3 (type: varchar(5)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 3106 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3106 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2057,24 +2067,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_string
-            Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: string), c3 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2215,24 +2225,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_char
-            Statistics: Num rows: 10 Data size: 3838 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(50)), c3 (type: char(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 3838 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3838 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(50)), _col3 (type: char(50)), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(50)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 3838 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3838 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2373,24 +2383,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_char_trunc
-            Statistics: Num rows: 10 Data size: 3064 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(7)), c2 (type: char(7)), c3 (type: char(7)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 3064 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3064 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(7)), _col2 (type: char(7)), _col3 (type: char(7)), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(7)), VALUE._col1 (type: char(7)), VALUE._col2 (type: char(7)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 3064 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3064 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2531,24 +2541,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_varchar
-            Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(50)), c2 (type: varchar(50)), c3 (type: varchar(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(50)), _col2 (type: varchar(50)), _col3 (type: varchar(50)), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(50)), VALUE._col1 (type: varchar(50)), VALUE._col2 (type: varchar(50)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2689,24 +2699,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_varchar_trunc
-            Statistics: Num rows: 10 Data size: 3028 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(7)), c2 (type: varchar(7)), c3 (type: varchar(7)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 3028 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3028 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(7)), _col2 (type: varchar(7)), _col3 (type: varchar(7)), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(7)), VALUE._col1 (type: varchar(7)), VALUE._col2 (type: varchar(7)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 3028 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3028 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2857,24 +2867,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_string_group_string
-            Statistics: Num rows: 10 Data size: 4964 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1904 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(9)), c3 (type: varchar(50)), c4 (type: char(9)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 4964 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 1904 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 4964 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 1904 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(9)), _col3 (type: varchar(50)), _col4 (type: char(9)), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(9)), VALUE._col2 (type: varchar(50)), VALUE._col3 (type: char(9)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 4964 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 1904 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 4964 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1904 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3015,24 +3025,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_string_group_char
-            Statistics: Num rows: 10 Data size: 4278 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1992 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(50)), c2 (type: varchar(9)), c3 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 4278 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 1992 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 4278 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 1992 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(50)), _col2 (type: varchar(9)), _col3 (type: string), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(50)), VALUE._col1 (type: varchar(9)), VALUE._col2 (type: string), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 4278 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 1992 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 4278 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1992 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3173,24 +3183,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_string_group_varchar
-            Statistics: Num rows: 10 Data size: 4026 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1524 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(9)), c3 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 4026 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 1524 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 4026 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 1524 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(9)), _col3 (type: string), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(9)), VALUE._col2 (type: string), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 4026 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 1524 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 4026 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1524 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3345,24 +3355,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_tinyint
-            Statistics: Num rows: 10 Data size: 1838 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 456 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: smallint), c2 (type: int), c3 (type: bigint), c4 (type: decimal(38,18)), c5 (type: float), c6 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-              Statistics: Num rows: 10 Data size: 1838 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 456 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1838 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 456 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: decimal(38,18)), _col5 (type: float), _col6 (type: double), _col7 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: smallint), VALUE._col1 (type: int), VALUE._col2 (type: bigint), VALUE._col3 (type: decimal(38,18)), VALUE._col4 (type: float), VALUE._col5 (type: double), VALUE._col6 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-          Statistics: Num rows: 10 Data size: 1838 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 456 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1838 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 456 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3507,24 +3517,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_smallint
-            Statistics: Num rows: 10 Data size: 1826 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 464 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: int), c2 (type: bigint), c3 (type: decimal(38,18)), c4 (type: float), c5 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 10 Data size: 1826 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 464 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1826 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 464 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: bigint), _col3 (type: decimal(38,18)), _col4 (type: float), _col5 (type: double), _col6 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int), VALUE._col1 (type: bigint), VALUE._col2 (type: decimal(38,18)), VALUE._col3 (type: float), VALUE._col4 (type: double), VALUE._col5 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 10 Data size: 1826 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 464 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1826 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 464 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3667,24 +3677,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_int
-            Statistics: Num rows: 10 Data size: 1786 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: bigint), c2 (type: decimal(38,18)), c3 (type: float), c4 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 1786 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1786 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: bigint), _col2 (type: decimal(38,18)), _col3 (type: float), _col4 (type: double), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: bigint), VALUE._col1 (type: decimal(38,18)), VALUE._col2 (type: float), VALUE._col3 (type: double), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 1786 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1786 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3825,24 +3835,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_bigint
-            Statistics: Num rows: 10 Data size: 1770 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: decimal(38,18)), c2 (type: float), c3 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 1770 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1770 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: decimal(38,18)), _col2 (type: float), _col3 (type: double), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: decimal(38,18)), VALUE._col1 (type: float), VALUE._col2 (type: double), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 1770 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1770 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3981,24 +3991,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_decimal
-            Statistics: Num rows: 10 Data size: 1898 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1280 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: float), c2 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 10 Data size: 1898 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 1280 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1898 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 1280 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: float), _col2 (type: double), _col3 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: float), VALUE._col1 (type: double), VALUE._col2 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 10 Data size: 1898 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 1280 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1898 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1280 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4135,24 +4145,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_float
-            Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: double), _col2 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
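
Every plan hunk above follows one pattern: with basic stats recorded at create/insert time, the optimizer sizes operators from the stored numRows/rawDataSize instead of a file-size heuristic, so the golden files move from guessed counts (Num rows: 9 or 10) to the actual inserted counts (Num rows: 3 or 4). A minimal sketch of reproducing one such change, assuming a build containing this patch with hive.stats.autogather left at its default (true); the table and column names below are made up for illustration and do not come from the patch:

  -- create a table and insert a known number of rows; autogathered
  -- basic stats record the true count
  CREATE TABLE stats_demo (insert_num INT, b STRING);
  INSERT INTO stats_demo VALUES (1, 'a'), (2, 'b'), (3, 'c'), (4, 'd');
  -- the TableScan in the plan should now report "Num rows: 4" rather
  -- than a file-size-derived estimate such as "Num rows: 10"
  EXPLAIN SELECT insert_num, b FROM stats_demo ORDER BY insert_num;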


[13/21] hive git commit: HIVE-13341: Stats state is not captured correctly: differentiate load table and create table (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

Posted by px...@apache.org.
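
The Table Parameters hunks in the diff below show the other half of the change: a freshly created, not-yet-loaded table now reports COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} together with zeroed numFiles/numRows/rawDataSize/totalSize. A hedged way to observe this (table name is illustrative, same assumptions as the sketch above):

  -- run immediately after CREATE TABLE, before any load or insert;
  -- Table Parameters should list COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
  -- with numFiles, numRows, rawDataSize, and totalSize all at 0
  CREATE TABLE stats_demo_empty (insert_num INT, b STRING);
  DESCRIBE FORMATTED stats_demo_empty;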
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/schema_evol_orc_nonvec_fetchwork_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/schema_evol_orc_nonvec_fetchwork_table.q.out b/ql/src/test/results/clientpositive/schema_evol_orc_nonvec_fetchwork_table.q.out
index 912463b..cb58e7b 100644
--- a/ql/src/test/results/clientpositive/schema_evol_orc_nonvec_fetchwork_table.q.out
+++ b/ql/src/test/results/clientpositive/schema_evol_orc_nonvec_fetchwork_table.q.out
@@ -48,6 +48,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -183,24 +188,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_add_int_permute_select
-            Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), a (type: int), b (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -313,6 +318,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -451,24 +461,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_add_int_string_permute_select
-            Statistics: Num rows: 10 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), a (type: int), b (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 10 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 10 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -734,24 +744,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_double
-            Statistics: Num rows: 10 Data size: 2346 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1656 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: double), c2 (type: double), c3 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 2346 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 1656 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 2346 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 1656 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: double), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: double), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 2346 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 1656 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 2346 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1656 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -905,24 +915,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_date_group_string_group_timestamp
-            Statistics: Num rows: 9 Data size: 4770 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: char(50)), c3 (type: char(15)), c4 (type: varchar(50)), c5 (type: varchar(15)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 9 Data size: 4770 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 9 Data size: 4770 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: char(50)), _col3 (type: char(15)), _col4 (type: varchar(50)), _col5 (type: varchar(15)), _col6 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(15)), VALUE._col3 (type: varchar(50)), VALUE._col4 (type: varchar(15)), VALUE._col5 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 9 Data size: 4770 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 9 Data size: 4770 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1092,24 +1102,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_date_group_string_group_date
-            Statistics: Num rows: 9 Data size: 4764 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 1128 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: char(50)), c3 (type: char(15)), c4 (type: varchar(50)), c5 (type: varchar(15)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 9 Data size: 4764 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 1128 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 9 Data size: 4764 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 1128 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: char(50)), _col3 (type: char(15)), _col4 (type: varchar(50)), _col5 (type: varchar(15)), _col6 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(15)), VALUE._col3 (type: varchar(50)), VALUE._col4 (type: varchar(15)), VALUE._col5 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 9 Data size: 4764 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 1128 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 9 Data size: 4764 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 1128 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1259,24 +1269,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_string
-            Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: string), c3 (type: string), c4 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: string), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1419,24 +1429,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_char
-            Statistics: Num rows: 10 Data size: 4222 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(50)), c3 (type: char(50)), c4 (type: char(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 4222 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 4222 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(50)), _col3 (type: char(50)), _col4 (type: char(50)), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(50)), VALUE._col3 (type: char(50)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 4222 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 4222 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1579,24 +1589,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_char_trunc
-            Statistics: Num rows: 10 Data size: 3142 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(5)), c2 (type: char(5)), c3 (type: char(5)), c4 (type: char(5)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 3142 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3142 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(5)), _col2 (type: char(5)), _col3 (type: char(5)), _col4 (type: char(5)), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(5)), VALUE._col1 (type: char(5)), VALUE._col2 (type: char(5)), VALUE._col3 (type: char(5)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 3142 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3142 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1739,24 +1749,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_varchar
-            Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(50)), c2 (type: varchar(50)), c3 (type: varchar(50)), c4 (type: varchar(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(50)), _col2 (type: varchar(50)), _col3 (type: varchar(50)), _col4 (type: varchar(50)), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(50)), VALUE._col1 (type: varchar(50)), VALUE._col2 (type: varchar(50)), VALUE._col3 (type: varchar(50)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1899,24 +1909,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_varchar_trunc
-            Statistics: Num rows: 10 Data size: 3106 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(5)), c2 (type: varchar(5)), c3 (type: varchar(5)), c4 (type: varchar(5)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 3106 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3106 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(5)), _col2 (type: varchar(5)), _col3 (type: varchar(5)), _col4 (type: varchar(5)), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(5)), VALUE._col1 (type: varchar(5)), VALUE._col2 (type: varchar(5)), VALUE._col3 (type: varchar(5)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 3106 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3106 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2057,24 +2067,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_string
-            Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: string), c3 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2215,24 +2225,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_char
-            Statistics: Num rows: 10 Data size: 3838 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(50)), c3 (type: char(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 3838 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3838 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(50)), _col3 (type: char(50)), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(50)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 3838 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3838 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2373,24 +2383,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_char_trunc
-            Statistics: Num rows: 10 Data size: 3064 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(7)), c2 (type: char(7)), c3 (type: char(7)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 3064 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3064 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(7)), _col2 (type: char(7)), _col3 (type: char(7)), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(7)), VALUE._col1 (type: char(7)), VALUE._col2 (type: char(7)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 3064 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3064 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2531,24 +2541,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_varchar
-            Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(50)), c2 (type: varchar(50)), c3 (type: varchar(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(50)), _col2 (type: varchar(50)), _col3 (type: varchar(50)), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(50)), VALUE._col1 (type: varchar(50)), VALUE._col2 (type: varchar(50)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2689,24 +2699,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_varchar_trunc
-            Statistics: Num rows: 10 Data size: 3028 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(7)), c2 (type: varchar(7)), c3 (type: varchar(7)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 3028 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3028 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(7)), _col2 (type: varchar(7)), _col3 (type: varchar(7)), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(7)), VALUE._col1 (type: varchar(7)), VALUE._col2 (type: varchar(7)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 3028 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3028 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2857,24 +2867,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_string_group_string
-            Statistics: Num rows: 10 Data size: 4964 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1904 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(9)), c3 (type: varchar(50)), c4 (type: char(9)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 4964 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 1904 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 4964 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 1904 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(9)), _col3 (type: varchar(50)), _col4 (type: char(9)), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(9)), VALUE._col2 (type: varchar(50)), VALUE._col3 (type: char(9)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 4964 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 1904 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 4964 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1904 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3015,24 +3025,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_string_group_char
-            Statistics: Num rows: 10 Data size: 4278 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1992 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(50)), c2 (type: varchar(9)), c3 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 4278 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 1992 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 4278 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 1992 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(50)), _col2 (type: varchar(9)), _col3 (type: string), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(50)), VALUE._col1 (type: varchar(9)), VALUE._col2 (type: string), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 4278 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 1992 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 4278 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1992 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3173,24 +3183,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_string_group_varchar
-            Statistics: Num rows: 10 Data size: 4026 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1524 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(9)), c3 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 4026 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 1524 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 4026 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 1524 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(9)), _col3 (type: string), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(9)), VALUE._col2 (type: string), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 4026 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 1524 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 4026 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1524 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3345,24 +3355,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_tinyint
-            Statistics: Num rows: 10 Data size: 1838 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 456 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: smallint), c2 (type: int), c3 (type: bigint), c4 (type: decimal(38,18)), c5 (type: float), c6 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-              Statistics: Num rows: 10 Data size: 1838 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 456 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1838 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 456 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: decimal(38,18)), _col5 (type: float), _col6 (type: double), _col7 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: smallint), VALUE._col1 (type: int), VALUE._col2 (type: bigint), VALUE._col3 (type: decimal(38,18)), VALUE._col4 (type: float), VALUE._col5 (type: double), VALUE._col6 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-          Statistics: Num rows: 10 Data size: 1838 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 456 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1838 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 456 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3507,24 +3517,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_smallint
-            Statistics: Num rows: 10 Data size: 1826 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 464 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: int), c2 (type: bigint), c3 (type: decimal(38,18)), c4 (type: float), c5 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 10 Data size: 1826 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 464 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1826 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 464 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: bigint), _col3 (type: decimal(38,18)), _col4 (type: float), _col5 (type: double), _col6 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int), VALUE._col1 (type: bigint), VALUE._col2 (type: decimal(38,18)), VALUE._col3 (type: float), VALUE._col4 (type: double), VALUE._col5 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 10 Data size: 1826 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 464 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1826 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 464 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3667,24 +3677,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_int
-            Statistics: Num rows: 10 Data size: 1786 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: bigint), c2 (type: decimal(38,18)), c3 (type: float), c4 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 1786 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1786 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: bigint), _col2 (type: decimal(38,18)), _col3 (type: float), _col4 (type: double), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: bigint), VALUE._col1 (type: decimal(38,18)), VALUE._col2 (type: float), VALUE._col3 (type: double), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 1786 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1786 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3825,24 +3835,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_bigint
-            Statistics: Num rows: 10 Data size: 1770 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: decimal(38,18)), c2 (type: float), c3 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 1770 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1770 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: decimal(38,18)), _col2 (type: float), _col3 (type: double), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: decimal(38,18)), VALUE._col1 (type: float), VALUE._col2 (type: double), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 1770 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1770 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3981,24 +3991,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_decimal
-            Statistics: Num rows: 10 Data size: 1898 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1280 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: float), c2 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 10 Data size: 1898 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 1280 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1898 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 1280 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: float), _col2 (type: double), _col3 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: float), VALUE._col1 (type: double), VALUE._col2 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 10 Data size: 1898 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 1280 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1898 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1280 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4135,24 +4145,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_float
-            Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: double), _col2 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
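
Across these schema-evolution plans the estimates shrink (e.g. Num rows: 10, Data size: 994 becoming Num rows: 4, Data size: 400), presumably because the scans now pick up the numRows and rawDataSize recorded in the metastore instead of deriving a row count from file size; Basic stats remain COMPLETE in both versions, only the source of the numbers changes.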


[15/21] hive git commit: HIVE-13341: Stats state is not captured correctly: differentiate load table and create table (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

Posted by px...@apache.org.
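
The hunks in this part add the basic-stats parameters (numFiles, numRows, rawDataSize, totalSize) and the COLUMN_STATS_ACCURATE marker to more golden files. As a minimal sketch of where these parameters come from — assuming a Hive 2.x session and a hypothetical unpartitioned table t1 that mirrors the test1 schema appearing below; none of this is part of the patch itself:

    -- A freshly created managed table is empty, so its basic stats are known
    -- exactly and the metastore can mark them accurate:
    CREATE TABLE t1 (a INT, b DOUBLE);
    DESCRIBE FORMATTED t1;
    --   COLUMN_STATS_ACCURATE   {"BASIC_STATS":"true"}
    --   numFiles 0, numRows 0, rawDataSize 0, totalSize 0

    -- LOAD DATA moves files in without counting rows (the case this change
    -- differentiates from CREATE TABLE); ANALYZE recomputes the stats afterwards:
    LOAD DATA LOCAL INPATH '/tmp/t1.txt' INTO TABLE t1;
    ANALYZE TABLE t1 COMPUTE STATISTICS;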
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/list_bucket_dml_8.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_8.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_8.q.java1.7.out
index d40a693..de1305f 100644
--- a/ql/src/test/results/clientpositive/list_bucket_dml_8.q.java1.7.out
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_8.q.java1.7.out
@@ -439,6 +439,8 @@ Table:              	list_bucketing_dynamic_part
 #### A masked pattern was here ####
 Partition Parameters:	 	 
 	numFiles            	3                   
+	numRows             	984                 
+	rawDataSize         	9488                
 	totalSize           	10586               
 #### A masked pattern was here ####
 	 	 
@@ -555,8 +557,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.list_bucketing_dynamic_part
               numFiles 3
+              numRows 984
               partition_columns ds/hr
               partition_columns.types string:string
+              rawDataSize 9488
               serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
@@ -585,16 +589,16 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: list_bucketing_dynamic_part
-          Statistics: Num rows: 16 Data size: 136 Basic stats: PARTIAL Column stats: NONE
+          Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
           GatherStats: false
           Filter Operator
             isSamplingPred: false
             predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
-            Statistics: Num rows: 4 Data size: 34 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 4 Data size: 34 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
               ListSink
 
 PREHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
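
The revised plan follows from the partition parameters above: with numRows and rawDataSize recorded, the scan starts from real totals (Basic stats COMPLETE instead of PARTIAL) rather than a file-size guess. Both versions are consistent with Hive halving the row count per predicate when column statistics are absent:

    old:   16 rows  ->   16 / 2 / 2 = 4      (two AND-ed equality predicates)
    new: 1000 rows  -> 1000 / 2 / 2 = 250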

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/mapjoin_memcheck.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/mapjoin_memcheck.q.out b/ql/src/test/results/clientpositive/mapjoin_memcheck.q.out
index 8f5492f..65d896b 100644
--- a/ql/src/test/results/clientpositive/mapjoin_memcheck.q.out
+++ b/ql/src/test/results/clientpositive/mapjoin_memcheck.q.out
@@ -44,14 +44,14 @@ STAGE PLANS:
         $hdt$_0:src1 
           TableScan
             alias: src1
-            Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: key is not null (type: boolean)
-              Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
                 HashTable Sink Operator
                   keys:
                     0 _col0 (type: string)
@@ -62,14 +62,14 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: src1
-            Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: key is not null (type: boolean)
-              Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
                 Map Join Operator
                   condition map:
                        Inner Join 0 to 1
@@ -77,10 +77,10 @@ STAGE PLANS:
                     0 _col0 (type: string)
                     1 _col0 (type: string)
                   outputColumnNames: _col0, _col1, _col2, _col3
-                  Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
-                    Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/metadataonly1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/metadataonly1.q.out b/ql/src/test/results/clientpositive/metadataonly1.q.out
index 0281407..4b4c57c 100644
--- a/ql/src/test/results/clientpositive/metadataonly1.q.out
+++ b/ql/src/test/results/clientpositive/metadataonly1.q.out
@@ -132,17 +132,22 @@ STAGE PLANS:
             partition values:
               ds 1
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test1
+              numFiles 0
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct test1 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.NullStructSerDe
           
@@ -256,17 +261,22 @@ STAGE PLANS:
             partition values:
               ds 1
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test1
+              numFiles 0
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct test1 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.NullStructSerDe
           
@@ -380,17 +390,22 @@ STAGE PLANS:
             partition values:
               ds 1
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test1
+              numFiles 0
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct test1 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
@@ -515,17 +530,22 @@ STAGE PLANS:
             partition values:
               ds 1
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test1
+              numFiles 0
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct test1 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.NullStructSerDe
           
@@ -554,17 +574,22 @@ STAGE PLANS:
             partition values:
               ds 2
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test1
+              numFiles 0
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct test1 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.NullStructSerDe
           
@@ -678,17 +703,22 @@ STAGE PLANS:
             partition values:
               ds 1
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test1
+              numFiles 0
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct test1 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
@@ -718,17 +748,22 @@ STAGE PLANS:
             partition values:
               ds 2
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test1
+              numFiles 0
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct test1 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
@@ -948,17 +983,22 @@ STAGE PLANS:
               ds 1
               hr 1
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test2
+              numFiles 0
+              numRows 0
               partition_columns ds/hr
               partition_columns.types string:string
+              rawDataSize 0
               serialization.ddl struct test2 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.NullStructSerDe
           
@@ -988,17 +1028,22 @@ STAGE PLANS:
               ds 1
               hr 2
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test2
+              numFiles 0
+              numRows 0
               partition_columns ds/hr
               partition_columns.types string:string
+              rawDataSize 0
               serialization.ddl struct test2 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.NullStructSerDe
           
@@ -1028,17 +1073,22 @@ STAGE PLANS:
               ds 1
               hr 3
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test2
+              numFiles 0
+              numRows 0
               partition_columns ds/hr
               partition_columns.types string:string
+              rawDataSize 0
               serialization.ddl struct test2 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.NullStructSerDe
           
@@ -1163,17 +1213,22 @@ STAGE PLANS:
               ds 1
               hr 1
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test2
+              numFiles 0
+              numRows 0
               partition_columns ds/hr
               partition_columns.types string:string
+              rawDataSize 0
               serialization.ddl struct test2 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
@@ -1204,17 +1259,22 @@ STAGE PLANS:
               ds 1
               hr 2
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test2
+              numFiles 0
+              numRows 0
               partition_columns ds/hr
               partition_columns.types string:string
+              rawDataSize 0
               serialization.ddl struct test2 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
@@ -1245,17 +1305,22 @@ STAGE PLANS:
               ds 1
               hr 3
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test2
+              numFiles 0
+              numRows 0
               partition_columns ds/hr
               partition_columns.types string:string
+              rawDataSize 0
               serialization.ddl struct test2 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
@@ -1375,17 +1440,22 @@ STAGE PLANS:
             partition values:
               ds 1
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test1
+              numFiles 0
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct test1 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.NullStructSerDe
           
@@ -1414,17 +1484,22 @@ STAGE PLANS:
             partition values:
               ds 2
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test1
+              numFiles 0
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct test1 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.NullStructSerDe
           
@@ -1598,17 +1673,22 @@ STAGE PLANS:
               ds 01:10:10
               hr 01
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test2
+              numFiles 0
+              numRows 0
               partition_columns ds/hr
               partition_columns.types string:string
+              rawDataSize 0
               serialization.ddl struct test2 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.NullStructSerDe
           
@@ -1638,17 +1718,22 @@ STAGE PLANS:
               ds 01:10:20
               hr 02
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test2
+              numFiles 0
+              numRows 0
               partition_columns ds/hr
               partition_columns.types string:string
+              rawDataSize 0
               serialization.ddl struct test2 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.NullStructSerDe
           
@@ -1678,17 +1763,22 @@ STAGE PLANS:
               ds 1
               hr 1
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test2
+              numFiles 0
+              numRows 0
               partition_columns ds/hr
               partition_columns.types string:string
+              rawDataSize 0
               serialization.ddl struct test2 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.NullStructSerDe
           
@@ -1718,17 +1808,22 @@ STAGE PLANS:
               ds 1
               hr 2
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test2
+              numFiles 0
+              numRows 0
               partition_columns ds/hr
               partition_columns.types string:string
+              rawDataSize 0
               serialization.ddl struct test2 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.NullStructSerDe
           
@@ -1758,17 +1853,22 @@ STAGE PLANS:
               ds 1
               hr 3
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns a,b
               columns.comments 
               columns.types int:double
 #### A masked pattern was here ####
               name default.test2
+              numFiles 0
+              numRows 0
               partition_columns ds/hr
               partition_columns.types string:string
+              rawDataSize 0
               serialization.ddl struct test2 { i32 a, double b}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.NullStructSerDe
           

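The hunks above all make the same change: partition descriptors in EXPLAIN EXTENDED plans now carry COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} together with zeroed numFiles/numRows/rawDataSize/totalSize for partitions that were created by DDL and never loaded (note numFiles 0 and totalSize 0 in every added block). A minimal sketch of how such a plan can be produced, assuming hive.stats.autogather is left at its default and using illustrative table and partition names rather than the ones from the tests:

  -- DDL-only table and partition: basic stats are recorded as
  -- accurate with all counters at zero, so the partition properties
  -- in the extended plan match the lines added above
  CREATE TABLE stats_demo (a INT, b DOUBLE) PARTITIONED BY (ds STRING);
  ALTER TABLE stats_demo ADD PARTITION (ds='1');
  EXPLAIN EXTENDED SELECT a, b FROM stats_demo WHERE ds = '1';
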
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/nullformat.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/nullformat.q.out b/ql/src/test/results/clientpositive/nullformat.q.out
index af91470..2a80359 100644
--- a/ql/src/test/results/clientpositive/nullformat.q.out
+++ b/ql/src/test/results/clientpositive/nullformat.q.out
@@ -93,6 +93,11 @@ OUTPUTFORMAT
 LOCATION
 #### A masked pattern was here ####
 TBLPROPERTIES (
+  'COLUMN_STATS_ACCURATE'='{\"BASIC_STATS\":\"true\"}', 
+  'numFiles'='0', 
+  'numRows'='0', 
+  'rawDataSize'='0', 
+  'totalSize'='0', 
 #### A masked pattern was here ####
 PREHOOK: query: -- load null data from another table and verify that the null is stored in the expected format
 INSERT OVERWRITE TABLE null_tab1 SELECT a,b FROM base_tab

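nullformat.q.out records the same five parameters surfacing in SHOW CREATE TABLE output, escaped as TBLPROPERTIES. A sketch, with the table name taken from the query visible in the hunk (null_tab1):

  -- the generated DDL for a still-empty table now lists the
  -- zeroed basic stats among its TBLPROPERTIES
  SHOW CREATE TABLE null_tab1;
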
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/orc_create.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_create.q.out b/ql/src/test/results/clientpositive/orc_create.q.out
index 3013fda..20c3fce 100644
--- a/ql/src/test/results/clientpositive/orc_create.q.out
+++ b/ql/src/test/results/clientpositive/orc_create.q.out
@@ -70,6 +70,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -319,6 +324,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

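orc_create.q.out (and the parquet equivalents further down) capture the change in DESCRIBE FORMATTED output instead: Table Parameters for a newly created, still-empty managed table now list the accurate zero stats. A sketch with an assumed table name and column layout:

  -- DESCRIBE FORMATTED on a fresh ORC table; Table Parameters
  -- should now include COLUMN_STATS_ACCURATE and the four counters
  CREATE TABLE orc_demo (k INT, v STRING) STORED AS ORC;
  DESCRIBE FORMATTED orc_demo;
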
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/orc_llap.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_llap.q.out b/ql/src/test/results/clientpositive/orc_llap.q.out
index 6fc73b7..bae69bb 100644
--- a/ql/src/test/results/clientpositive/orc_llap.q.out
+++ b/ql/src/test/results/clientpositive/orc_llap.q.out
@@ -719,17 +719,17 @@ STAGE PLANS:
           TableScan
             alias: orc_llap
             filterExpr: ((cint > 10) and cbigint is not null) (type: boolean)
-            Statistics: Num rows: 98779 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: ((cint > 10) and cbigint is not null) (type: boolean)
-              Statistics: Num rows: 32926 Data size: 526817 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 81920 Data size: 19386626 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: cint (type: int), csmallint (type: smallint), cbigint (type: bigint)
                 outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 32926 Data size: 526817 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 81920 Data size: 19386626 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 32926 Data size: 526817 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 81920 Data size: 19386626 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -792,17 +792,17 @@ STAGE PLANS:
           TableScan
             alias: orc_llap
             filterExpr: ((cint > 10) and cbigint is not null) (type: boolean)
-            Statistics: Num rows: 4938 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: ((cint > 10) and cbigint is not null) (type: boolean)
-              Statistics: Num rows: 1646 Data size: 526823 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 81920 Data size: 19386626 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
-                Statistics: Num rows: 1646 Data size: 526823 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 81920 Data size: 19386626 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 1646 Data size: 526823 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 81920 Data size: 19386626 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -874,17 +874,17 @@ STAGE PLANS:
           TableScan
             alias: orc_llap
             filterExpr: ((cint > 5) and (cint < 10)) (type: boolean)
-            Statistics: Num rows: 15196 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: ((cint > 5) and (cint < 10)) (type: boolean)
-              Statistics: Num rows: 1688 Data size: 175561 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 27306 Data size: 6462051 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: cstring2 (type: string)
                 outputColumnNames: _col0
-                Statistics: Num rows: 1688 Data size: 175561 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 27306 Data size: 6462051 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 1688 Data size: 175561 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 27306 Data size: 6462051 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -944,22 +944,22 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orc_llap
-            Statistics: Num rows: 7902 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: cstring1 (type: string), cstring2 (type: string)
               outputColumnNames: cstring1, cstring2
-              Statistics: Num rows: 7902 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: count()
                 keys: cstring1 (type: string), cstring2 (type: string)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 7902 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: string), _col1 (type: string)
                   sort order: ++
                   Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
-                  Statistics: Num rows: 7902 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col2 (type: bigint)
       Execution mode: vectorized
       LLAP IO: all inputs
@@ -969,10 +969,10 @@ STAGE PLANS:
           keys: KEY._col0 (type: string), KEY._col1 (type: string)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 3951 Data size: 790234 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 122880 Data size: 29079940 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 3951 Data size: 790234 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 122880 Data size: 29079940 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1039,14 +1039,14 @@ STAGE PLANS:
           TableScan
             alias: o1
             filterExpr: (csmallint is not null and cbigint is not null) (type: boolean)
-            Statistics: Num rows: 14111 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: (csmallint is not null and cbigint is not null) (type: boolean)
-              Statistics: Num rows: 14111 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: csmallint (type: smallint), cstring1 (type: string)
                 outputColumnNames: _col0, _col2
-                Statistics: Num rows: 14111 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE
                 HashTable Sink Operator
                   keys:
                     0 _col0 (type: smallint)
@@ -1058,14 +1058,14 @@ STAGE PLANS:
           TableScan
             alias: o1
             filterExpr: (csmallint is not null and cbigint is not null) (type: boolean)
-            Statistics: Num rows: 14111 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: (csmallint is not null and cbigint is not null) (type: boolean)
-              Statistics: Num rows: 14111 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: csmallint (type: smallint), cstring2 (type: string)
                 outputColumnNames: _col0, _col2
-                Statistics: Num rows: 14111 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE
                 Map Join Operator
                   condition map:
                        Inner Join 0 to 1
@@ -1073,14 +1073,14 @@ STAGE PLANS:
                     0 _col0 (type: smallint)
                     1 _col0 (type: smallint)
                   outputColumnNames: _col2, _col5
-                  Statistics: Num rows: 15522 Data size: 1738515 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 270336 Data size: 63975869 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: _col2 (type: string), _col5 (type: string)
                     outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 15522 Data size: 1738515 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 270336 Data size: 63975869 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false
-                      Statistics: Num rows: 15522 Data size: 1738515 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 270336 Data size: 63975869 Basic stats: COMPLETE Column stats: NONE
                       table:
                           input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

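In orc_llap.q.out it is the Statistics annotations on the operator trees that change rather than any metadata listing: every scan of orc_llap now reports Num rows 245760 and Data size 58159880, consistent with the planner taking numRows and rawDataSize from the metastore instead of extrapolating both from the compressed file size, and the downstream Filter/Select/Join estimates scale accordingly. Approximately the query behind the first hunk, reconstructed from its filterExpr and Select Operator (the test's exact statement may differ):

  EXPLAIN
  SELECT cint, csmallint, cbigint
  FROM orc_llap
  WHERE cint > 10 AND cbigint IS NOT NULL;
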
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out b/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out
index 7b361b7..38321e9 100644
--- a/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out
+++ b/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out
@@ -135,11 +135,11 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orc_pred
-            Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: hash(t) (type: int)
               outputColumnNames: _col0
-              Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: sum(_col0)
                 mode: hash
@@ -183,11 +183,11 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orc_pred
-            Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: hash(t) (type: int)
               outputColumnNames: _col0
-              Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: sum(_col0)
                 mode: hash
@@ -311,14 +311,14 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orc_pred
-            Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: ((t < 0) and (UDFToInteger(t) > -2)) (type: boolean)
-              Statistics: Num rows: 116 Data size: 34409 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 670 Data size: 2680 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: hash(t) (type: int)
                 outputColumnNames: _col0
-                Statistics: Num rows: 116 Data size: 34409 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 670 Data size: 2680 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
                   aggregations: sum(_col0)
                   mode: hash
@@ -369,14 +369,14 @@ STAGE PLANS:
           TableScan
             alias: orc_pred
             filterExpr: ((t < 0) and (UDFToInteger(t) > -2)) (type: boolean)
-            Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: ((t < 0) and (UDFToInteger(t) > -2)) (type: boolean)
-              Statistics: Num rows: 116 Data size: 34409 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 670 Data size: 2680 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: hash(t) (type: int)
                 outputColumnNames: _col0
-                Statistics: Num rows: 116 Data size: 34409 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 670 Data size: 2680 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
                   aggregations: sum(_col0)
                   mode: hash
@@ -458,17 +458,17 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orc_pred
-            Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 232 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: ((t = -1) and s is not null and (s like 'bob%')) (type: boolean)
-              Statistics: Num rows: 262 Data size: 77718 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 58 Data size: 6037 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: -1 (type: tinyint), s (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 262 Data size: 77718 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 58 Data size: 6037 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 262 Data size: 77718 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 58 Data size: 6037 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -501,17 +501,17 @@ STAGE PLANS:
           TableScan
             alias: orc_pred
             filterExpr: ((t = -1) and s is not null and (s like 'bob%')) (type: boolean)
-            Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 232 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: ((t = -1) and s is not null and (s like 'bob%')) (type: boolean)
-              Statistics: Num rows: 262 Data size: 77718 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 58 Data size: 6037 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: -1 (type: tinyint), s (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 262 Data size: 77718 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 58 Data size: 6037 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 262 Data size: 77718 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 58 Data size: 6037 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -589,26 +589,26 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orc_pred
-            Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 232 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: (s is not null and (s like 'bob%') and (not (t) IN (-1, -2, -3)) and t BETWEEN 25 AND 30) (type: boolean)
-              Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: t (type: tinyint), s (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: tinyint), _col1 (type: string)
                   sort order: ++
-                  Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: tinyint), KEY.reducesinkkey1 (type: string)
           outputColumnNames: _col0, _col1
-          Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -645,26 +645,26 @@ STAGE PLANS:
           TableScan
             alias: orc_pred
             filterExpr: (s is not null and (s like 'bob%') and (not (t) IN (-1, -2, -3)) and t BETWEEN 25 AND 30) (type: boolean)
-            Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 232 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: (s is not null and (s like 'bob%') and (not (t) IN (-1, -2, -3)) and t BETWEEN 25 AND 30) (type: boolean)
-              Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: t (type: tinyint), s (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: tinyint), _col1 (type: string)
                   sort order: ++
-                  Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: tinyint), KEY.reducesinkkey1 (type: string)
           outputColumnNames: _col0, _col1
-          Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -766,31 +766,31 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orc_pred
-            Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 208 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: ((d >= 10.0) and (d < 12.0) and (s like '%son') and (t > 0) and si BETWEEN 300 AND 400 and (not (s like '%car%'))) (type: boolean)
-              Statistics: Num rows: 5 Data size: 1483 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 5 Data size: 1483 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col3 (type: string)
                   sort order: -
-                  Statistics: Num rows: 5 Data size: 1483 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
                   TopN Hash Memory Usage: 0.1
                   value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double)
       Reduce Operator Tree:
         Select Operator
           expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 5 Data size: 1483 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
           Limit
             Number of rows: 3
-            Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -835,31 +835,31 @@ STAGE PLANS:
           TableScan
             alias: orc_pred
             filterExpr: ((d >= 10.0) and (d < 12.0) and (s like '%son') and (t > 0) and si BETWEEN 300 AND 400 and (not (s like '%car%'))) (type: boolean)
-            Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 208 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: ((d >= 10.0) and (d < 12.0) and (s like '%son') and (t > 0) and si BETWEEN 300 AND 400 and (not (s like '%car%'))) (type: boolean)
-              Statistics: Num rows: 5 Data size: 1483 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 5 Data size: 1483 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col3 (type: string)
                   sort order: -
-                  Statistics: Num rows: 5 Data size: 1483 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
                   TopN Hash Memory Usage: 0.1
                   value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double)
       Reduce Operator Tree:
         Select Operator
           expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 5 Data size: 1483 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
           Limit
             Number of rows: 3
-            Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -970,28 +970,28 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orc_pred
-            Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 208 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: ((t > 10) and (t <> 101) and (d >= 10.0) and (d < 12.0) and (s like '%son') and (not (s like '%car%')) and (t > 0) and si BETWEEN 300 AND 400) (type: boolean)
-              Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col3 (type: string)
                   sort order: -
-                  Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
                   TopN Hash Memory Usage: 0.1
                   value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double)
       Reduce Operator Tree:
         Select Operator
           expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
           Limit
             Number of rows: 3
-            Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
               table:
@@ -1006,20 +1006,20 @@ STAGE PLANS:
             Reduce Output Operator
               key expressions: _col3 (type: string)
               sort order: -
-              Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
               TopN Hash Memory Usage: 0.1
               value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double)
       Reduce Operator Tree:
         Select Operator
           expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
           Limit
             Number of rows: 3
-            Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1069,28 +1069,28 @@ STAGE PLANS:
           TableScan
             alias: orc_pred
             filterExpr: ((t > 10) and (t <> 101) and (d >= 10.0) and (d < 12.0) and (s like '%son') and (not (s like '%car%')) and (t > 0) and si BETWEEN 300 AND 400) (type: boolean)
-            Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 208 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: ((t > 10) and (t <> 101) and (d >= 10.0) and (d < 12.0) and (s like '%son') and (not (s like '%car%')) and (t > 0) and si BETWEEN 300 AND 400) (type: boolean)
-              Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col3 (type: string)
                   sort order: -
-                  Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
                   TopN Hash Memory Usage: 0.1
                   value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double)
       Reduce Operator Tree:
         Select Operator
           expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
           Limit
             Number of rows: 3
-            Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
               table:
@@ -1105,20 +1105,20 @@ STAGE PLANS:
             Reduce Output Operator
               key expressions: _col3 (type: string)
               sort order: -
-              Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
               TopN Hash Memory Usage: 0.1
               value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double)
       Reduce Operator Tree:
         Select Operator
           expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
           Limit
             Number of rows: 3
-            Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

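orc_predicate_pushdown.q.out shows the same effect pulling in the other direction on Data size: in the first two hunks the scan estimate for orc_pred drops from 311170 bytes to 24150 while the row estimate rises from 1049 to 6037, again consistent with numRows and rawDataSize now being read from the metastore rather than inferred from the ORC file footprint. Approximately the query in those hunks, reconstructed from the hash(t) Select and sum(_col0) Group By operators:

  EXPLAIN
  SELECT SUM(HASH(t)) FROM orc_pred;
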
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/parquet_array_null_element.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/parquet_array_null_element.q.out b/ql/src/test/results/clientpositive/parquet_array_null_element.q.out
index 387f01e..75d2d27 100644
--- a/ql/src/test/results/clientpositive/parquet_array_null_element.q.out
+++ b/ql/src/test/results/clientpositive/parquet_array_null_element.q.out
@@ -70,6 +70,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/parquet_create.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/parquet_create.q.out b/ql/src/test/results/clientpositive/parquet_create.q.out
index c6d33ff..cc4b735 100644
--- a/ql/src/test/results/clientpositive/parquet_create.q.out
+++ b/ql/src/test/results/clientpositive/parquet_create.q.out
@@ -73,6 +73,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/parquet_mixed_partition_formats.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/parquet_mixed_partition_formats.q.out b/ql/src/test/results/clientpositive/parquet_mixed_partition_formats.q.out
index d6affd6..e96aa80 100644
--- a/ql/src/test/results/clientpositive/parquet_mixed_partition_formats.q.out
+++ b/ql/src/test/results/clientpositive/parquet_mixed_partition_formats.q.out
@@ -126,6 +126,8 @@ Table:              	parquet_mixed_partition_formats
 #### A masked pattern was here ####
 Partition Parameters:	 	 
 	numFiles            	1                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	2521                
 #### A masked pattern was here ####
 	 	 
@@ -247,6 +249,8 @@ Table:              	parquet_mixed_partition_formats
 #### A masked pattern was here ####
 Partition Parameters:	 	 
 	numFiles            	1                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	2521                
 #### A masked pattern was here ####
 	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/parquet_serde.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/parquet_serde.q.out b/ql/src/test/results/clientpositive/parquet_serde.q.out
index c1e594a..6d5f0f8 100644
--- a/ql/src/test/results/clientpositive/parquet_serde.q.out
+++ b/ql/src/test/results/clientpositive/parquet_serde.q.out
@@ -72,6 +72,8 @@ Table:              	parquet_mixed_fileformat
 #### A masked pattern was here ####
 Partition Parameters:	 	 
 	numFiles            	1                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	36                  
 #### A masked pattern was here ####
 	 	 
@@ -173,6 +175,8 @@ Table:              	parquet_mixed_fileformat
 #### A masked pattern was here ####
 Partition Parameters:	 	 
 	numFiles            	1                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	36                  
 #### A masked pattern was here ####
 	 	 

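The two parquet partition tests above illustrate the other half of the change: partitions that actually received data (numFiles 1, non-zero totalSize) gain numRows 0 and rawDataSize 0 but, unlike the DDL-created objects earlier in the diff, no COLUMN_STATS_ACCURATE entry, since stats for files brought in by a load are not known to be accurate. A sketch of the distinction, with the file path and table name assumed:

  -- CREATE TABLE alone records accurate zero stats; LOAD DATA
  -- leaves the stats state unmarked, so only the raw counters
  -- appear among the partition parameters
  CREATE TABLE load_demo (k INT, v STRING) PARTITIONED BY (ds STRING);
  LOAD DATA LOCAL INPATH '/tmp/demo.txt'
    INTO TABLE load_demo PARTITION (ds='1');
  DESCRIBE FORMATTED load_demo PARTITION (ds='1');
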
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/part_inherit_tbl_props.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/part_inherit_tbl_props.q.out b/ql/src/test/results/clientpositive/part_inherit_tbl_props.q.out
index 82f4750..876d0db 100644
--- a/ql/src/test/results/clientpositive/part_inherit_tbl_props.q.out
+++ b/ql/src/test/results/clientpositive/part_inherit_tbl_props.q.out
@@ -38,8 +38,13 @@ Database:           	default
 Table:              	mytbl               	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 	a                   	myval               
 	b                   	yourval             
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/part_inherit_tbl_props_empty.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/part_inherit_tbl_props_empty.q.out b/ql/src/test/results/clientpositive/part_inherit_tbl_props_empty.q.out
index e4b8003..95c2d6e 100644
--- a/ql/src/test/results/clientpositive/part_inherit_tbl_props_empty.q.out
+++ b/ql/src/test/results/clientpositive/part_inherit_tbl_props_empty.q.out
@@ -34,6 +34,11 @@ Database:           	default
 Table:              	mytbl               	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/part_inherit_tbl_props_with_star.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/part_inherit_tbl_props_with_star.q.out b/ql/src/test/results/clientpositive/part_inherit_tbl_props_with_star.q.out
index 106448a..0be588a 100644
--- a/ql/src/test/results/clientpositive/part_inherit_tbl_props_with_star.q.out
+++ b/ql/src/test/results/clientpositive/part_inherit_tbl_props_with_star.q.out
@@ -38,9 +38,14 @@ Database:           	default
 Table:              	mytbl               	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 	a                   	myval               
 	b                   	yourval             
 	c                   	noval               
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

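The three part_inherit_tbl_props variants confirm that the auto-gathered stats coexist with partition parameters inherited from the table: the inherited keys (a and b, plus c in the starred variant) keep their values while COLUMN_STATS_ACCURATE and the zeroed counters are sorted into the same list. A sketch, assuming the inherit-properties setting and a column layout typical of these tests:

  SET hive.metastore.partition.inherit.table.properties=a,b;
  CREATE TABLE mytbl (c1 STRING) PARTITIONED BY (ds STRING)
    TBLPROPERTIES ('a'='myval', 'b'='yourval');
  ALTER TABLE mytbl ADD PARTITION (ds='1');
  DESCRIBE FORMATTED mytbl PARTITION (ds='1');
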
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/partition_coltype_literals.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/partition_coltype_literals.q.out b/ql/src/test/results/clientpositive/partition_coltype_literals.q.out
index bc159eb..b7a09d2 100644
--- a/ql/src/test/results/clientpositive/partition_coltype_literals.q.out
+++ b/ql/src/test/results/clientpositive/partition_coltype_literals.q.out
@@ -45,6 +45,11 @@ Database:           	default
 Table:              	partcoltypenum      	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -92,8 +97,11 @@ Database:           	default
 Table:              	partcoltypenum      	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -142,8 +150,11 @@ Database:           	default
 Table:              	partcoltypenum      	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -193,8 +204,11 @@ Database:           	default
 Table:              	partcoltypenum      	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 

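The first hunk above is a newly added, empty partition (every counter 0); the three later hunks are partitions that already reported numFiles and totalSize and now also carry numRows, rawDataSize, and the accuracy flag. The same full set of basic counters can also be computed explicitly; a sketch with a hypothetical partition spec:

    -- COMPUTE STATISTICS (basic) fills in numFiles, numRows, rawDataSize,
    -- and totalSize for the partition and marks them accurate
    ANALYZE TABLE partcoltypenum PARTITION (tint=100, sint=1000)
        COMPUTE STATISTICS;
    DESCRIBE FORMATTED partcoltypenum PARTITION (tint=100, sint=1000);
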
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/pcr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/pcr.q.out b/ql/src/test/results/clientpositive/pcr.q.out
index b53226e..7222617 100644
--- a/ql/src/test/results/clientpositive/pcr.q.out
+++ b/ql/src/test/results/clientpositive/pcr.q.out
@@ -3419,15 +3419,20 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                       bucket_count -1
                       columns key,value
                       columns.comments 
                       columns.types int:string
 #### A masked pattern was here ####
                       name default.pcr_t2
+                      numFiles 0
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct pcr_t2 { i32 key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 0
 #### A masked pattern was here ####
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.pcr_t2
@@ -3449,15 +3454,20 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                       bucket_count -1
                       columns key,value
                       columns.comments 
                       columns.types int:string
 #### A masked pattern was here ####
                       name default.pcr_t3
+                      numFiles 0
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct pcr_t3 { i32 key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 0
 #### A masked pattern was here ####
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.pcr_t3
@@ -3533,15 +3543,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.pcr_t2
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct pcr_t2 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.pcr_t2
@@ -3564,15 +3579,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value
                     columns.comments 
                     columns.types int:string
 #### A masked pattern was here ####
                     name default.pcr_t2
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct pcr_t2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.pcr_t2
@@ -3588,30 +3608,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value
               columns.comments 
               columns.types int:string
 #### A masked pattern was here ####
               name default.pcr_t2
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct pcr_t2 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.pcr_t2
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct pcr_t2 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.pcr_t2
@@ -3633,15 +3663,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value
                     columns.comments 
                     columns.types int:string
 #### A masked pattern was here ####
                     name default.pcr_t2
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct pcr_t2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.pcr_t2
@@ -3657,30 +3692,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value
               columns.comments 
               columns.types int:string
 #### A masked pattern was here ####
               name default.pcr_t2
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct pcr_t2 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.pcr_t2
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct pcr_t2 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.pcr_t2
@@ -3712,15 +3757,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.pcr_t3
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct pcr_t3 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.pcr_t3
@@ -3743,15 +3793,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value
                     columns.comments 
                     columns.types int:string
 #### A masked pattern was here ####
                     name default.pcr_t3
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct pcr_t3 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.pcr_t3
@@ -3767,30 +3822,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value
               columns.comments 
               columns.types int:string
 #### A masked pattern was here ####
               name default.pcr_t3
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct pcr_t3 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.pcr_t3
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct pcr_t3 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.pcr_t3
@@ -3812,15 +3877,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value
                     columns.comments 
                     columns.types int:string
 #### A masked pattern was here ####
                     name default.pcr_t3
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct pcr_t3 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.pcr_t3
@@ -3836,30 +3906,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value
               columns.comments 
               columns.types int:string
 #### A masked pattern was here ####
               name default.pcr_t3
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct pcr_t3 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.pcr_t3
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct pcr_t3 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.pcr_t3

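pcr.q.out shows the same keys surfacing one level deeper: every properties: block that EXPLAIN EXTENDED prints for default.pcr_t2 and default.pcr_t3 now carries COLUMN_STATS_ACCURATE and the four counters, since both tables are created empty immediately before being written. The statement shape behind these plans is roughly as follows (a guess at the test's multi-insert query, not a quote from it):

    EXPLAIN EXTENDED
    FROM pcr_t1
    INSERT OVERWRITE TABLE pcr_t2 SELECT key, value WHERE ds = '2000-04-08'
    INSERT OVERWRITE TABLE pcr_t3 SELECT key, value WHERE ds = '2000-04-08';
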
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/schema_evol_text_vecrow_mapwork_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/schema_evol_text_vecrow_mapwork_table.q.out b/ql/src/test/results/clientpositive/schema_evol_text_vecrow_mapwork_table.q.out
index 50790f4..561c128 100644
--- a/ql/src/test/results/clientpositive/schema_evol_text_vecrow_mapwork_table.q.out
+++ b/ql/src/test/results/clientpositive/schema_evol_text_vecrow_mapwork_table.q.out
@@ -52,6 +52,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -187,25 +192,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_add_int_permute_select
-            Statistics: Num rows: 10 Data size: 111 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), a (type: int), b (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 10 Data size: 111 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 111 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 10 Data size: 111 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 111 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
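
The Num rows / Data size churn in this file's plan hunks is the downstream effect of the same change: with BASIC_STATS recorded as accurate, the stats annotator can read the stored row count instead of deriving an estimate from file size. A hedged reading of the hunk above (the estimator's internals are not shown here):

    -- before: no stored numRows -> ~10 rows estimated from the old
    --         111-byte data size
    -- after:  numRows 4 / rawDataSize 48, stored at insert time, used as-is
    EXPLAIN SELECT insert_num, a, b
    FROM table_add_int_permute_select
    ORDER BY insert_num;
    -- Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE
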
@@ -318,6 +323,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -456,25 +466,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_add_int_string_permute_select
-            Statistics: Num rows: 10 Data size: 155 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), a (type: int), b (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 10 Data size: 155 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 155 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 10 Data size: 155 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 155 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -740,25 +750,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_double
-            Statistics: Num rows: 10 Data size: 383 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 151 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: double), c2 (type: double), c3 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 383 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 151 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 383 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 151 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: double), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: double), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 383 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 151 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 383 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 151 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -912,25 +922,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_date_group_string_group_timestamp
-            Statistics: Num rows: 9 Data size: 1194 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 450 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: char(50)), c3 (type: char(15)), c4 (type: varchar(50)), c5 (type: varchar(15)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 9 Data size: 1194 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 450 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 9 Data size: 1194 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 450 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: char(50)), _col3 (type: char(15)), _col4 (type: varchar(50)), _col5 (type: varchar(15)), _col6 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(15)), VALUE._col3 (type: varchar(50)), VALUE._col4 (type: varchar(15)), VALUE._col5 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 9 Data size: 1194 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 450 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 9 Data size: 1194 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 450 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1100,25 +1110,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_date_group_string_group_date
-            Statistics: Num rows: 9 Data size: 555 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 195 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: char(50)), c3 (type: char(15)), c4 (type: varchar(50)), c5 (type: varchar(15)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 9 Data size: 555 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 195 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 9 Data size: 555 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 195 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: char(50)), _col3 (type: char(15)), _col4 (type: varchar(50)), _col5 (type: varchar(15)), _col6 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(15)), VALUE._col3 (type: varchar(50)), VALUE._col4 (type: varchar(15)), VALUE._col5 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 9 Data size: 555 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 195 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 9 Data size: 555 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 195 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1268,25 +1278,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_string
-            Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: string), c3 (type: string), c4 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: string), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1429,25 +1439,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_char
-            Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(50)), c3 (type: char(50)), c4 (type: char(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(50)), _col3 (type: char(50)), _col4 (type: char(50)), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(50)), VALUE._col3 (type: char(50)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1590,25 +1600,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_char_trunc
-            Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(5)), c2 (type: char(5)), c3 (type: char(5)), c4 (type: char(5)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(5)), _col2 (type: char(5)), _col3 (type: char(5)), _col4 (type: char(5)), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(5)), VALUE._col1 (type: char(5)), VALUE._col2 (type: char(5)), VALUE._col3 (type: char(5)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1751,25 +1761,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_varchar
-            Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(50)), c2 (type: varchar(50)), c3 (type: varchar(50)), c4 (type: varchar(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(50)), _col2 (type: varchar(50)), _col3 (type: varchar(50)), _col4 (type: varchar(50)), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(50)), VALUE._col1 (type: varchar(50)), VALUE._col2 (type: varchar(50)), VALUE._col3 (type: varchar(50)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1912,25 +1922,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_varchar_trunc
-            Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(5)), c2 (type: varchar(5)), c3 (type: varchar(5)), c4 (type: varchar(5)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(5)), _col2 (type: varchar(5)), _col3 (type: varchar(5)), _col4 (type: varchar(5)), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(5)), VALUE._col1 (type: varchar(5)), VALUE._col2 (type: varchar(5)), VALUE._col3 (type: varchar(5)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2071,25 +2081,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_string
-            Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: string), c3 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2230,25 +2240,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_char
-            Statistics: Num rows: 10 Data size: 385 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(50)), c3 (type: char(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 385 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 385 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(50)), _col3 (type: char(50)), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(50)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 385 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 385 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2389,25 +2399,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_char_trunc
-            Statistics: Num rows: 10 Data size: 357 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(7)), c2 (type: char(7)), c3 (type: char(7)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 357 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 357 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(7)), _col2 (type: char(7)), _col3 (type: char(7)), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(7)), VALUE._col1 (type: char(7)), VALUE._col2 (type: char(7)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 357 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 357 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2548,25 +2558,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_varchar
-            Statistics: Num rows: 10 Data size: 368 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 203 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(50)), c2 (type: varchar(50)), c3 (type: varchar(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 368 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 203 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 368 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 203 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(50)), _col2 (type: varchar(50)), _col3 (type: varchar(50)), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(50)), VALUE._col1 (type: varchar(50)), VALUE._col2 (type: varchar(50)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 368 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 203 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 368 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 203 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2707,25 +2717,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_varchar_trunc
-            Statistics: Num rows: 10 Data size: 362 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(7)), c2 (type: varchar(7)), c3 (type: varchar(7)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 362 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 362 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(7)), _col2 (type: varchar(7)), _col3 (type: varchar(7)), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(7)), VALUE._col1 (type: varchar(7)), VALUE._col2 (type: varchar(7)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 362 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 362 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2876,25 +2886,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_string_group_string
-            Statistics: Num rows: 10 Data size: 548 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 236 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(9)), c3 (type: varchar(50)), c4 (type: char(9)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 548 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 236 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 548 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 236 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(9)), _col3 (type: varchar(50)), _col4 (type: char(9)), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(9)), VALUE._col2 (type: varchar(50)), VALUE._col3 (type: char(9)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 548 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 236 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 548 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 236 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3035,25 +3045,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_string_group_char
-            Statistics: Num rows: 10 Data size: 463 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 178 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(50)), c2 (type: varchar(9)), c3 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 463 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 178 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 463 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 178 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(50)), _col2 (type: varchar(9)), _col3 (type: string), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(50)), VALUE._col1 (type: varchar(9)), VALUE._col2 (type: string), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 463 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 178 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 463 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 178 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3194,25 +3204,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_string_group_varchar
-            Statistics: Num rows: 10 Data size: 452 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 187 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(9)), c3 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 452 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 187 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 452 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 187 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(9)), _col3 (type: string), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(9)), VALUE._col2 (type: string), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 452 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 187 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 452 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 187 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3367,25 +3377,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_tinyint
-            Statistics: Num rows: 10 Data size: 446 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 118 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: smallint), c2 (type: int), c3 (type: bigint), c4 (type: decimal(38,18)), c5 (type: float), c6 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-              Statistics: Num rows: 10 Data size: 446 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 118 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 446 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 118 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: decimal(38,18)), _col5 (type: float), _col6 (type: double), _col7 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: smallint), VALUE._col1 (type: int), VALUE._col2 (type: bigint), VALUE._col3 (type: decimal(38,18)), VALUE._col4 (type: float), VALUE._col5 (type: double), VALUE._col6 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-          Statistics: Num rows: 10 Data size: 446 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 118 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 446 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 118 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3530,25 +3540,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_smallint
-            Statistics: Num rows: 10 Data size: 445 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 130 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: int), c2 (type: bigint), c3 (type: decimal(38,18)), c4 (type: float), c5 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 10 Data size: 445 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 130 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 445 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 130 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: bigint), _col3 (type: decimal(38,18)), _col4 (type: float), _col5 (type: double), _col6 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int), VALUE._col1 (type: bigint), VALUE._col2 (type: decimal(38,18)), VALUE._col3 (type: float), VALUE._col4 (type: double), VALUE._col5 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 10 Data size: 445 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 130 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 445 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 130 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3691,25 +3701,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_int
-            Statistics: Num rows: 10 Data size: 429 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 132 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: bigint), c2 (type: decimal(38,18)), c3 (type: float), c4 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 429 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 132 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 429 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 132 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: bigint), _col2 (type: decimal(38,18)), _col3 (type: float), _col4 (type: double), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: bigint), VALUE._col1 (type: decimal(38,18)), VALUE._col2 (type: float), VALUE._col3 (type: double), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 429 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 132 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 429 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 132 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3850,25 +3860,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_bigint
-            Statistics: Num rows: 10 Data size: 411 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 127 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: decimal(38,18)), c2 (type: float), c3 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 411 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 127 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 411 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 127 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: decimal(38,18)), _col2 (type: float), _col3 (type: double), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: decimal(38,18)), VALUE._col1 (type: float), VALUE._col2 (type: double), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 411 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 127 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 411 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 127 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4007,25 +4017,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_decimal
-            Statistics: Num rows: 10 Data size: 335 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 238 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: float), c2 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 10 Data size: 335 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 238 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 335 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 238 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: float), _col2 (type: double), _col3 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: float), VALUE._col1 (type: double), VALUE._col2 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 10 Data size: 335 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 238 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 335 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 238 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4162,25 +4172,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_float
-            Statistics: Num rows: 10 Data size: 148 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 78 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 10 Data size: 148 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 78 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 148 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 78 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: double), _col2 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 10 Data size: 148 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 78 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 148 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 78 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

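The plan updates above all follow one pattern: estimates that were previously derived from file size (Num rows: 10) are replaced by the actual row count (Num rows: 4), which suggests the optimizer now reads the recorded numRows instead of guessing from totalSize. A minimal sketch of reproducing the effect (the table name and data below are illustrative, not part of the patch):

  CREATE TABLE stats_demo (insert_num INT, b STRING);
  INSERT INTO stats_demo VALUES (1, 'a'), (2, 'b'), (3, 'c'), (4, 'd');
  -- With basic stats captured, the TableScan operator in the plan should
  -- report "Num rows: 4 ... Basic stats: COMPLETE", as in the diffs above.
  EXPLAIN SELECT insert_num, b FROM stats_demo ORDER BY insert_num;
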
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/show_create_table_alter.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/show_create_table_alter.q.out b/ql/src/test/results/clientpositive/show_create_table_alter.q.out
index 32819ea..d09f30b 100644
--- a/ql/src/test/results/clientpositive/show_create_table_alter.q.out
+++ b/ql/src/test/results/clientpositive/show_create_table_alter.q.out
@@ -35,6 +35,11 @@ OUTPUTFORMAT
 LOCATION
 #### A masked pattern was here ####
 TBLPROPERTIES (
+  'COLUMN_STATS_ACCURATE'='{\"BASIC_STATS\":\"true\"}', 
+  'numFiles'='0', 
+  'numRows'='0', 
+  'rawDataSize'='0', 
+  'totalSize'='0', 
 #### A masked pattern was here ####
 PREHOOK: query: -- Add a comment to the table, change the EXTERNAL property, and test SHOW CREATE TABLE on the change.
 ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='temporary table', 'EXTERNAL'='FALSE')
@@ -73,6 +78,8 @@ TBLPROPERTIES (
   'EXTERNAL'='FALSE', 
 #### A masked pattern was here ####
   'numFiles'='0', 
+  'numRows'='0', 
+  'rawDataSize'='0', 
   'totalSize'='0', 
 #### A masked pattern was here ####
 PREHOOK: query: -- Alter the table comment, change the EXTERNAL property back and test SHOW CREATE TABLE on the change.
@@ -111,6 +118,8 @@ LOCATION
 TBLPROPERTIES (
 #### A masked pattern was here ####
   'numFiles'='0', 
+  'numRows'='0', 
+  'rawDataSize'='0', 
   'totalSize'='0', 
 #### A masked pattern was here ####
 PREHOOK: query: -- Change the 'SORTBUCKETCOLSPREFIX' property and test SHOW CREATE TABLE. The output should not change.
@@ -149,6 +158,8 @@ LOCATION
 TBLPROPERTIES (
 #### A masked pattern was here ####
   'numFiles'='0', 
+  'numRows'='0', 
+  'rawDataSize'='0', 
   'totalSize'='0', 
 #### A masked pattern was here ####
 PREHOOK: query: -- Alter the storage handler of the table, and test SHOW CREATE TABLE.
@@ -187,6 +198,8 @@ LOCATION
 TBLPROPERTIES (
 #### A masked pattern was here ####
   'numFiles'='0', 
+  'numRows'='0', 
+  'rawDataSize'='0', 
   'totalSize'='0', 
 #### A masked pattern was here ####
 PREHOOK: query: DROP TABLE tmp_showcrt1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/show_create_table_db_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/show_create_table_db_table.q.out b/ql/src/test/results/clientpositive/show_create_table_db_table.q.out
index 495f4b5..daf63e9 100644
--- a/ql/src/test/results/clientpositive/show_create_table_db_table.q.out
+++ b/ql/src/test/results/clientpositive/show_create_table_db_table.q.out
@@ -46,6 +46,11 @@ OUTPUTFORMAT
 LOCATION
 #### A masked pattern was here ####
 TBLPROPERTIES (
+  'COLUMN_STATS_ACCURATE'='{\"BASIC_STATS\":\"true\"}', 
+  'numFiles'='0', 
+  'numRows'='0', 
+  'rawDataSize'='0', 
+  'totalSize'='0', 
 #### A masked pattern was here ####
 PREHOOK: query: DROP TABLE tmp_feng.tmp_showcrt
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/show_create_table_serde.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/show_create_table_serde.q.out b/ql/src/test/results/clientpositive/show_create_table_serde.q.out
index 2350d98..a7bcb44 100644
--- a/ql/src/test/results/clientpositive/show_create_table_serde.q.out
+++ b/ql/src/test/results/clientpositive/show_create_table_serde.q.out
@@ -42,6 +42,8 @@ LOCATION
 TBLPROPERTIES (
 #### A masked pattern was here ####
   'numFiles'='0', 
+  'numRows'='0', 
+  'rawDataSize'='0', 
   'totalSize'='0', 
 #### A masked pattern was here ####
 PREHOOK: query: DROP TABLE tmp_showcrt1
@@ -90,6 +92,11 @@ OUTPUTFORMAT
 LOCATION
 #### A masked pattern was here ####
 TBLPROPERTIES (
+  'COLUMN_STATS_ACCURATE'='{\"BASIC_STATS\":\"true\"}', 
+  'numFiles'='0', 
+  'numRows'='0', 
+  'rawDataSize'='0', 
+  'totalSize'='0', 
 #### A masked pattern was here ####
 PREHOOK: query: DROP TABLE tmp_showcrt1
 PREHOOK: type: DROPTABLE
@@ -139,6 +146,11 @@ OUTPUTFORMAT
 LOCATION
 #### A masked pattern was here ####
 TBLPROPERTIES (
+  'COLUMN_STATS_ACCURATE'='{\"BASIC_STATS\":\"true\"}', 
+  'numFiles'='0', 
+  'numRows'='0', 
+  'rawDataSize'='0', 
+  'totalSize'='0', 
 #### A masked pattern was here ####
 PREHOOK: query: DROP TABLE tmp_showcrt1
 PREHOOK: type: DROPTABLE
@@ -183,6 +195,11 @@ WITH SERDEPROPERTIES (
 LOCATION
 #### A masked pattern was here ####
 TBLPROPERTIES (
+  'COLUMN_STATS_ACCURATE'='{\"BASIC_STATS\":\"true\"}', 
+  'numFiles'='0', 
+  'numRows'='0', 
+  'rawDataSize'='0', 
+  'totalSize'='0', 
 #### A masked pattern was here ####
 PREHOOK: query: DROP TABLE tmp_showcrt1
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/show_tblproperties.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/show_tblproperties.q.out b/ql/src/test/results/clientpositive/show_tblproperties.q.out
index 63bbe6d..e1c6670 100644
--- a/ql/src/test/results/clientpositive/show_tblproperties.q.out
+++ b/ql/src/test/results/clientpositive/show_tblproperties.q.out
@@ -39,6 +39,8 @@ POSTHOOK: type: SHOW_TBLPROPERTIES
 bar	bar value
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 tmp	true
 totalSize	0
 #### A masked pattern was here ####
@@ -54,6 +56,8 @@ POSTHOOK: type: SHOW_TBLPROPERTIES
 bar	bar value
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 tmp	true
 totalSize	0
 #### A masked pattern was here ####
@@ -107,6 +111,8 @@ POSTHOOK: type: SHOW_TBLPROPERTIES
 bar	bar value
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 tmp	true
 totalSize	0
 #### A masked pattern was here ####
@@ -124,6 +130,8 @@ POSTHOOK: type: SHOW_TBLPROPERTIES
 bar	bar value1
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 tmp	true1
 totalSize	0
 #### A masked pattern was here ####
@@ -147,6 +155,8 @@ POSTHOOK: type: SHOW_TBLPROPERTIES
 bar	bar value1
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 tmp	true1
 totalSize	0
 #### A masked pattern was here ####

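The Spark plan diffs that follow illustrate the load/create distinction this patch draws: partitions populated by LOAD gain bare numRows 0 and rawDataSize 0 entries (the counts are simply unknown), while a freshly created empty table such as bucketmapjoin_tmp_result below also gains COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}, since an empty new table trivially has accurate basic stats. A hedged way to inspect these counters directly (the partition spec here is hypothetical):

  -- DESCRIBE FORMATTED on a partition lists its parameters, including the
  -- numRows and rawDataSize entries added by this change.
  DESCRIBE FORMATTED bucket_big PARTITION (ds='1');
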
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_1.q.out b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_1.q.out
index dfb571d..e2be217 100644
--- a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_1.q.out
+++ b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_1.q.out
@@ -174,8 +174,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -219,8 +221,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -374,8 +378,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -419,8 +425,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -564,8 +572,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -665,8 +675,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -710,8 +722,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_12.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_12.q.out b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_12.q.out
index 76ea0a8..eff3671 100644
--- a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_12.q.out
+++ b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_12.q.out
@@ -194,8 +194,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -266,8 +268,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_medium
                     numFiles 3
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_medium { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -331,8 +335,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_medium
                     numFiles 3
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_medium { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -439,8 +445,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -484,8 +492,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_3.q.out b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_3.q.out
index c942d11..e09df8c 100644
--- a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_3.q.out
+++ b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_3.q.out
@@ -154,8 +154,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -308,8 +310,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -452,8 +456,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -497,8 +503,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -599,8 +607,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_4.q.out b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_4.q.out
index ba2547a..a59c8a4 100644
--- a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_4.q.out
+++ b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_4.q.out
@@ -170,8 +170,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -324,8 +326,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -468,8 +472,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -513,8 +519,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -615,8 +623,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_5.q.out b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_5.q.out
index 99fca25..04e5f40 100644
--- a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_5.q.out
+++ b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_5.q.out
@@ -140,6 +140,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -159,6 +161,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.bucket_big
                       numFiles 2
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct bucket_big { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -285,6 +289,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -304,6 +310,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.bucket_big
                       numFiles 2
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct bucket_big { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -420,6 +428,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -439,6 +449,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.bucket_small
                       numFiles 4
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct bucket_small { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -518,6 +530,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -537,6 +551,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.bucket_big
                       numFiles 2
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct bucket_big { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_7.q.out b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_7.q.out
index 00c601f..6f8307b 100644
--- a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_7.q.out
+++ b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_7.q.out
@@ -187,8 +187,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -232,8 +234,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -389,8 +393,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -434,8 +440,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -581,8 +589,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -626,8 +636,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -728,8 +740,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -773,8 +787,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_8.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_8.q.out b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_8.q.out
index 5564ceb..51f71c0 100644
--- a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_8.q.out
+++ b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_8.q.out
@@ -187,8 +187,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -232,8 +234,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -389,8 +393,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -434,8 +440,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -583,8 +591,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -628,8 +638,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -730,8 +742,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -775,8 +789,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe


http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucket_map_join_spark3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket_map_join_spark3.q.out b/ql/src/test/results/clientpositive/bucket_map_join_spark3.q.out
index aede979..0385622 100644
--- a/ql/src/test/results/clientpositive/bucket_map_join_spark3.q.out
+++ b/ql/src/test/results/clientpositive/bucket_map_join_spark3.q.out
@@ -129,8 +129,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -216,15 +218,20 @@ STAGE PLANS:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                           properties:
+                            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                             bucket_count -1
                             columns key,value1,value2
                             columns.comments 
                             columns.types string:string:string
 #### A masked pattern was here ####
                             name default.bucketmapjoin_tmp_result
+                            numFiles 0
+                            numRows 0
+                            rawDataSize 0
                             serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                             serialization.format 1
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            totalSize 0
 #### A masked pattern was here ####
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: default.bucketmapjoin_tmp_result
@@ -252,8 +259,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -296,8 +305,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_2
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -336,15 +347,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -439,8 +455,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -567,8 +585,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -611,8 +631,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_2
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketcontext_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketcontext_1.q.out b/ql/src/test/results/clientpositive/bucketcontext_1.q.out
index d1b7744..f8b3020 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_1.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_1.q.out
@@ -130,8 +130,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -235,8 +237,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -280,8 +284,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -422,8 +428,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -467,8 +475,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketcontext_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketcontext_2.q.out b/ql/src/test/results/clientpositive/bucketcontext_2.q.out
index 60c021c..9a39392 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_2.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_2.q.out
@@ -114,8 +114,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -219,8 +221,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -264,8 +268,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -406,8 +412,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -451,8 +459,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketcontext_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketcontext_3.q.out b/ql/src/test/results/clientpositive/bucketcontext_3.q.out
index 12eaddf..b1b8fcc 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_3.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_3.q.out
@@ -114,8 +114,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -157,8 +159,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -262,8 +266,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -403,8 +409,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketcontext_4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketcontext_4.q.out b/ql/src/test/results/clientpositive/bucketcontext_4.q.out
index 2573564..95d1a6b 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_4.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_4.q.out
@@ -130,8 +130,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -173,8 +175,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -278,8 +282,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -419,8 +425,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketcontext_5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketcontext_5.q.out b/ql/src/test/results/clientpositive/bucketcontext_5.q.out
index e1c6a80..39286fb 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_5.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_5.q.out
@@ -155,6 +155,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -174,6 +176,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.bucket_big
                 numFiles 2
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket_big { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -287,6 +291,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -306,6 +312,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.bucket_big
                 numFiles 2
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket_big { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketcontext_6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketcontext_6.q.out b/ql/src/test/results/clientpositive/bucketcontext_6.q.out
index 9deb64d..bb7ffcb 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_6.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_6.q.out
@@ -174,8 +174,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -219,8 +221,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -359,8 +363,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -404,8 +410,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketcontext_7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketcontext_7.q.out b/ql/src/test/results/clientpositive/bucketcontext_7.q.out
index 621844e..16e62d1 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_7.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_7.q.out
@@ -147,8 +147,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -190,8 +192,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -295,8 +299,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -340,8 +346,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -484,8 +492,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -529,8 +539,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketcontext_8.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketcontext_8.q.out b/ql/src/test/results/clientpositive/bucketcontext_8.q.out
index 7f3e50a..7a17693 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_8.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_8.q.out
@@ -147,8 +147,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -190,8 +192,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -295,8 +299,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -340,8 +346,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -484,8 +492,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -529,8 +539,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketmapjoin1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin1.q.out b/ql/src/test/results/clientpositive/bucketmapjoin1.q.out
index 9956fa7..0ef59a2 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin1.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin1.q.out
@@ -385,8 +385,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -472,15 +474,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,value1,value2
                           columns.comments 
                           columns.types string:string:string
 #### A masked pattern was here ####
                           name default.bucketmapjoin_tmp_result
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.bucketmapjoin_tmp_result
@@ -506,6 +513,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin
               numFiles 2
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -524,6 +533,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.srcbucket_mapjoin
                 numFiles 2
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -553,15 +564,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -584,15 +600,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value1,value2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.bucketmapjoin_tmp_result
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.bucketmapjoin_tmp_result
@@ -608,30 +629,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value1,value2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.bucketmapjoin_tmp_result
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -653,15 +684,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value1,value2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.bucketmapjoin_tmp_result
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.bucketmapjoin_tmp_result
@@ -677,30 +713,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value1,value2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.bucketmapjoin_tmp_result
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -945,8 +991,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketmapjoin10.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin10.q.out b/ql/src/test/results/clientpositive/bucketmapjoin10.q.out
index fcdbb09..0176c2a 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin10.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin10.q.out
@@ -167,8 +167,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 3
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -209,8 +211,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -305,8 +309,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_1
               numFiles 2
+              numRows 0
               partition_columns part
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -349,8 +355,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_1
               numFiles 3
+              numRows 0
               partition_columns part
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketmapjoin11.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin11.q.out b/ql/src/test/results/clientpositive/bucketmapjoin11.q.out
index 968066a..d8e7b66 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin11.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin11.q.out
@@ -177,8 +177,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 4
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -219,8 +221,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -323,8 +327,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_1
               numFiles 2
+              numRows 0
               partition_columns part
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -367,8 +373,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_1
               numFiles 4
+              numRows 0
               partition_columns part
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -494,8 +502,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 4
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -536,8 +546,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -640,8 +652,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_1
               numFiles 2
+              numRows 0
               partition_columns part
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -684,8 +698,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_1
               numFiles 4
+              numRows 0
               partition_columns part
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketmapjoin12.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin12.q.out b/ql/src/test/results/clientpositive/bucketmapjoin12.q.out
index 9017871..f53af00 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin12.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin12.q.out
@@ -136,8 +136,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -239,8 +241,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_1
               numFiles 2
+              numRows 0
               partition_columns part
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -364,8 +368,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_3
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_3 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -460,8 +466,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_1
               numFiles 2
+              numRows 0
               partition_columns part
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketmapjoin2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin2.q.out b/ql/src/test/results/clientpositive/bucketmapjoin2.q.out
index f2efb95..6809b12 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin2.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin2.q.out
@@ -133,8 +133,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -220,15 +222,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,value1,value2
                           columns.comments 
                           columns.types string:string:string
 #### A masked pattern was here ####
                           name default.bucketmapjoin_tmp_result
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.bucketmapjoin_tmp_result
@@ -256,8 +263,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -305,15 +314,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -336,15 +350,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value1,value2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.bucketmapjoin_tmp_result
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.bucketmapjoin_tmp_result
@@ -360,30 +379,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value1,value2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.bucketmapjoin_tmp_result
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -405,15 +434,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value1,value2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.bucketmapjoin_tmp_result
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.bucketmapjoin_tmp_result
@@ -429,30 +463,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value1,value2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.bucketmapjoin_tmp_result
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -616,8 +660,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -744,8 +790,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_2
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1158,8 +1206,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1200,8 +1250,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1328,8 +1380,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketmapjoin3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin3.q.out b/ql/src/test/results/clientpositive/bucketmapjoin3.q.out
index 5659983..3f8e1e5 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin3.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin3.q.out
@@ -157,8 +157,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -244,15 +246,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,value1,value2
                           columns.comments 
                           columns.types string:string:string
 #### A masked pattern was here ####
                           name default.bucketmapjoin_tmp_result
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.bucketmapjoin_tmp_result
@@ -280,8 +287,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_2
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -329,15 +338,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -360,15 +374,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value1,value2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.bucketmapjoin_tmp_result
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.bucketmapjoin_tmp_result
@@ -384,30 +403,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value1,value2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.bucketmapjoin_tmp_result
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -429,15 +458,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value1,value2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.bucketmapjoin_tmp_result
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.bucketmapjoin_tmp_result
@@ -453,30 +487,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value1,value2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.bucketmapjoin_tmp_result
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -640,8 +684,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -768,8 +814,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
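
The hunks above capture both sides of this change in one plan: the empty target table default.bucketmapjoin_tmp_result now carries COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} together with zeroed numFiles/numRows/rawDataSize/totalSize, while the pre-loaded source tables (e.g. default.srcbucket_mapjoin_part, numFiles 4) pick up only numRows 0 and rawDataSize 0 and no accuracy flag, since LOAD DATA moves files without counting rows. A minimal HiveQL sketch of the load side (table and file names are illustrative, not from this commit):

    -- LOAD DATA supplies file-level numbers (numFiles, totalSize) but not
    -- row-level ones, so numRows/rawDataSize stay 0 and, per the diffs
    -- above, the loaded table does not keep an accurate-basic-stats flag.
    CREATE TABLE load_demo (key INT, value STRING);
    LOAD DATA LOCAL INPATH 'kv1.txt' OVERWRITE INTO TABLE load_demo;
    DESCRIBE FORMATTED load_demo;
    -- Table Parameters: numFiles/totalSize reflect the loaded file,
    -- numRows 0, rawDataSize 0, no COLUMN_STATS_ACCURATE entry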

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucketmapjoin4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin4.q.out b/ql/src/test/results/clientpositive/bucketmapjoin4.q.out
index 5794d47..9353318 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin4.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin4.q.out
@@ -201,15 +201,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,value1,value2
                           columns.comments 
                           columns.types string:string:string
 #### A masked pattern was here ####
                           name default.bucketmapjoin_tmp_result
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.bucketmapjoin_tmp_result
@@ -235,6 +240,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin
               numFiles 2
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -253,6 +260,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.srcbucket_mapjoin
                 numFiles 2
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -282,15 +291,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -313,15 +327,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value1,value2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.bucketmapjoin_tmp_result
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.bucketmapjoin_tmp_result
@@ -337,30 +356,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value1,value2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.bucketmapjoin_tmp_result
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -382,15 +411,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value1,value2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.bucketmapjoin_tmp_result
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.bucketmapjoin_tmp_result
@@ -406,30 +440,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value1,value2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.bucketmapjoin_tmp_result
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -664,6 +708,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin
               numFiles 2
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -682,6 +728,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.srcbucket_mapjoin
                 numFiles 2
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe


[09/21] hive git commit: HIVE-13341: Stats state is not captured correctly: differentiate load table and create table (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

Posted by px...@apache.org.
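
The DESCRIBE FORMATTED diffs below show the create-table side of the change: a freshly created, still-empty managed table now records accurate basic stats immediately. A minimal HiveQL sketch (table name is illustrative, not from this commit):

    -- An empty table's basic stats are exactly known (all zero), so the
    -- create path can mark them accurate up front.
    CREATE TABLE stats_demo (insert_num INT, b STRING);
    DESCRIBE FORMATTED stats_demo;
    -- Table Parameters now include:
    --   COLUMN_STATS_ACCURATE   {"BASIC_STATS":"true"}
    --   numFiles    0
    --   numRows     0
    --   rawDataSize 0
    --   totalSize   0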
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/schema_evol_text_vec_mapwork_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/schema_evol_text_vec_mapwork_table.q.out b/ql/src/test/results/clientpositive/schema_evol_text_vec_mapwork_table.q.out
index 50328d3..aac5bc2 100644
--- a/ql/src/test/results/clientpositive/schema_evol_text_vec_mapwork_table.q.out
+++ b/ql/src/test/results/clientpositive/schema_evol_text_vec_mapwork_table.q.out
@@ -52,6 +52,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -187,25 +192,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_add_int_permute_select
-            Statistics: Num rows: 10 Data size: 111 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), a (type: int), b (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 10 Data size: 111 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 111 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 10 Data size: 111 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 111 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -318,6 +323,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -456,25 +466,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_add_int_string_permute_select
-            Statistics: Num rows: 10 Data size: 155 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), a (type: int), b (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 10 Data size: 155 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 155 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 10 Data size: 155 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 155 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -740,25 +750,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_double
-            Statistics: Num rows: 10 Data size: 383 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 151 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: double), c2 (type: double), c3 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 383 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 151 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 383 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 151 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: double), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: double), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 383 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 151 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 383 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 151 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -912,25 +922,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_date_group_string_group_timestamp
-            Statistics: Num rows: 9 Data size: 1194 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 450 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: char(50)), c3 (type: char(15)), c4 (type: varchar(50)), c5 (type: varchar(15)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 9 Data size: 1194 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 450 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 9 Data size: 1194 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 450 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: char(50)), _col3 (type: char(15)), _col4 (type: varchar(50)), _col5 (type: varchar(15)), _col6 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(15)), VALUE._col3 (type: varchar(50)), VALUE._col4 (type: varchar(15)), VALUE._col5 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 9 Data size: 1194 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 450 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 9 Data size: 1194 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 450 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1100,25 +1110,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_date_group_string_group_date
-            Statistics: Num rows: 9 Data size: 555 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 195 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: char(50)), c3 (type: char(15)), c4 (type: varchar(50)), c5 (type: varchar(15)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 9 Data size: 555 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 195 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 9 Data size: 555 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 195 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: char(50)), _col3 (type: char(15)), _col4 (type: varchar(50)), _col5 (type: varchar(15)), _col6 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(15)), VALUE._col3 (type: varchar(50)), VALUE._col4 (type: varchar(15)), VALUE._col5 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 9 Data size: 555 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 195 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 9 Data size: 555 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 195 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1268,25 +1278,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_string
-            Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: string), c3 (type: string), c4 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: string), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1429,25 +1439,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_char
-            Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(50)), c3 (type: char(50)), c4 (type: char(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(50)), _col3 (type: char(50)), _col4 (type: char(50)), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(50)), VALUE._col3 (type: char(50)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1590,25 +1600,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_char_trunc
-            Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(5)), c2 (type: char(5)), c3 (type: char(5)), c4 (type: char(5)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(5)), _col2 (type: char(5)), _col3 (type: char(5)), _col4 (type: char(5)), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(5)), VALUE._col1 (type: char(5)), VALUE._col2 (type: char(5)), VALUE._col3 (type: char(5)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1751,25 +1761,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_varchar
-            Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(50)), c2 (type: varchar(50)), c3 (type: varchar(50)), c4 (type: varchar(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(50)), _col2 (type: varchar(50)), _col3 (type: varchar(50)), _col4 (type: varchar(50)), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(50)), VALUE._col1 (type: varchar(50)), VALUE._col2 (type: varchar(50)), VALUE._col3 (type: varchar(50)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1912,25 +1922,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_varchar_trunc
-            Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(5)), c2 (type: varchar(5)), c3 (type: varchar(5)), c4 (type: varchar(5)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(5)), _col2 (type: varchar(5)), _col3 (type: varchar(5)), _col4 (type: varchar(5)), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(5)), VALUE._col1 (type: varchar(5)), VALUE._col2 (type: varchar(5)), VALUE._col3 (type: varchar(5)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2071,25 +2081,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_string
-            Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: string), c3 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2230,25 +2240,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_char
-            Statistics: Num rows: 10 Data size: 385 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(50)), c3 (type: char(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 385 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 385 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(50)), _col3 (type: char(50)), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(50)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 385 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 385 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2389,25 +2399,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_char_trunc
-            Statistics: Num rows: 10 Data size: 357 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(7)), c2 (type: char(7)), c3 (type: char(7)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 357 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 357 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(7)), _col2 (type: char(7)), _col3 (type: char(7)), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(7)), VALUE._col1 (type: char(7)), VALUE._col2 (type: char(7)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 357 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 357 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2548,25 +2558,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_varchar
-            Statistics: Num rows: 10 Data size: 368 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 203 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(50)), c2 (type: varchar(50)), c3 (type: varchar(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 368 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 203 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 368 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 203 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(50)), _col2 (type: varchar(50)), _col3 (type: varchar(50)), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(50)), VALUE._col1 (type: varchar(50)), VALUE._col2 (type: varchar(50)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 368 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 203 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 368 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 203 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2707,25 +2717,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_varchar_trunc
-            Statistics: Num rows: 10 Data size: 362 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(7)), c2 (type: varchar(7)), c3 (type: varchar(7)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 362 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 362 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(7)), _col2 (type: varchar(7)), _col3 (type: varchar(7)), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(7)), VALUE._col1 (type: varchar(7)), VALUE._col2 (type: varchar(7)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 362 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 362 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2876,25 +2886,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_string_group_string
-            Statistics: Num rows: 10 Data size: 548 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 236 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(9)), c3 (type: varchar(50)), c4 (type: char(9)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 548 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 236 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 548 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 236 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(9)), _col3 (type: varchar(50)), _col4 (type: char(9)), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(9)), VALUE._col2 (type: varchar(50)), VALUE._col3 (type: char(9)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 548 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 236 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 548 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 236 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3035,25 +3045,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_string_group_char
-            Statistics: Num rows: 10 Data size: 463 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 178 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(50)), c2 (type: varchar(9)), c3 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 463 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 178 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 463 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 178 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(50)), _col2 (type: varchar(9)), _col3 (type: string), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(50)), VALUE._col1 (type: varchar(9)), VALUE._col2 (type: string), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 463 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 178 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 463 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 178 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3194,25 +3204,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_string_group_varchar
-            Statistics: Num rows: 10 Data size: 452 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 187 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(9)), c3 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 452 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 187 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 452 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 187 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(9)), _col3 (type: string), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(9)), VALUE._col2 (type: string), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 452 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 187 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 452 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 187 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3367,25 +3377,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_tinyint
-            Statistics: Num rows: 10 Data size: 446 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 118 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: smallint), c2 (type: int), c3 (type: bigint), c4 (type: decimal(38,18)), c5 (type: float), c6 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-              Statistics: Num rows: 10 Data size: 446 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 118 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 446 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 118 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: decimal(38,18)), _col5 (type: float), _col6 (type: double), _col7 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: smallint), VALUE._col1 (type: int), VALUE._col2 (type: bigint), VALUE._col3 (type: decimal(38,18)), VALUE._col4 (type: float), VALUE._col5 (type: double), VALUE._col6 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-          Statistics: Num rows: 10 Data size: 446 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 118 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 446 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 118 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3530,25 +3540,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_smallint
-            Statistics: Num rows: 10 Data size: 445 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 130 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: int), c2 (type: bigint), c3 (type: decimal(38,18)), c4 (type: float), c5 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 10 Data size: 445 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 130 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 445 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 130 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: bigint), _col3 (type: decimal(38,18)), _col4 (type: float), _col5 (type: double), _col6 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int), VALUE._col1 (type: bigint), VALUE._col2 (type: decimal(38,18)), VALUE._col3 (type: float), VALUE._col4 (type: double), VALUE._col5 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 10 Data size: 445 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 130 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 445 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 130 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3691,25 +3701,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_int
-            Statistics: Num rows: 10 Data size: 429 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 132 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: bigint), c2 (type: decimal(38,18)), c3 (type: float), c4 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 429 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 132 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 429 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 132 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: bigint), _col2 (type: decimal(38,18)), _col3 (type: float), _col4 (type: double), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: bigint), VALUE._col1 (type: decimal(38,18)), VALUE._col2 (type: float), VALUE._col3 (type: double), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 429 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 132 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 429 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 132 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3850,25 +3860,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_bigint
-            Statistics: Num rows: 10 Data size: 411 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 127 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: decimal(38,18)), c2 (type: float), c3 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 411 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 127 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 411 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 127 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: decimal(38,18)), _col2 (type: float), _col3 (type: double), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: decimal(38,18)), VALUE._col1 (type: float), VALUE._col2 (type: double), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 411 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 127 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 411 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 127 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4007,25 +4017,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_decimal
-            Statistics: Num rows: 10 Data size: 335 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 238 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: float), c2 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 10 Data size: 335 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 238 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 335 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 238 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: float), _col2 (type: double), _col3 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: float), VALUE._col1 (type: double), VALUE._col2 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 10 Data size: 335 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 238 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 335 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 238 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4162,25 +4172,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_float
-            Statistics: Num rows: 10 Data size: 148 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 78 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 10 Data size: 148 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 78 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 148 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 78 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: double), _col2 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 10 Data size: 148 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 78 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 148 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 78 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
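
The plan and metadata changes above (and in the parts that follow) all trace back to the distinction HIVE-13341 draws: a table created empty has exactly known basic stats, while one populated by LOAD DATA does not. A minimal HiveQL sketch of the behavior these golden files reflect, assuming a throwaway table t and an illustrative input path (neither comes from the patch itself):

-- Newly created table: it is provably empty, so its basic stats are accurate.
-- DESCRIBE FORMATTED now lists COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-- with numFiles/numRows/rawDataSize/totalSize all 0, as the diffs show.
CREATE TABLE t (insert_num INT, b STRING);

-- LOAD DATA moves files in without scanning them, so row counts are unknown;
-- the accurate-stats state is dropped and EXPLAIN falls back to estimating
-- Num rows from file size (the kind of guess behind the old "Num rows: 10").
LOAD DATA LOCAL INPATH '/tmp/t.txt' INTO TABLE t;

-- A LOAD-invalidated table can have its stats recomputed explicitly:
ANALYZE TABLE t COMPUTE STATISTICS;

With the stats state captured this way, the EXPLAIN estimates track rows actually written rather than size-based guesses (hence "Num rows: 4" replacing "Num rows: 10" throughout these plans), though the particular numbers are simply what the schema-evolution test data produces.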


[11/21] hive git commit: HIVE-13341: Stats state is not captured correctly: differentiate load table and create table (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

Posted by px...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/schema_evol_orc_vec_mapwork_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/schema_evol_orc_vec_mapwork_table.q.out b/ql/src/test/results/clientpositive/schema_evol_orc_vec_mapwork_table.q.out
index a305847..c84c042 100644
--- a/ql/src/test/results/clientpositive/schema_evol_orc_vec_mapwork_table.q.out
+++ b/ql/src/test/results/clientpositive/schema_evol_orc_vec_mapwork_table.q.out
@@ -48,6 +48,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -183,25 +188,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_add_int_permute_select
-            Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), a (type: int), b (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -314,6 +319,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -452,25 +462,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_add_int_string_permute_select
-            Statistics: Num rows: 10 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), a (type: int), b (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 10 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 10 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -736,25 +746,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_double
-            Statistics: Num rows: 10 Data size: 2346 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1656 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: double), c2 (type: double), c3 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 2346 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 1656 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 2346 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 1656 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: double), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: double), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 2346 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 1656 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 2346 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1656 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -908,25 +918,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_date_group_string_group_timestamp
-            Statistics: Num rows: 9 Data size: 4770 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: char(50)), c3 (type: char(15)), c4 (type: varchar(50)), c5 (type: varchar(15)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 9 Data size: 4770 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 9 Data size: 4770 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: char(50)), _col3 (type: char(15)), _col4 (type: varchar(50)), _col5 (type: varchar(15)), _col6 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(15)), VALUE._col3 (type: varchar(50)), VALUE._col4 (type: varchar(15)), VALUE._col5 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 9 Data size: 4770 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 9 Data size: 4770 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1096,25 +1106,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_date_group_string_group_date
-            Statistics: Num rows: 9 Data size: 4764 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 1128 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: char(50)), c3 (type: char(15)), c4 (type: varchar(50)), c5 (type: varchar(15)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 9 Data size: 4764 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 1128 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 9 Data size: 4764 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 1128 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: char(50)), _col3 (type: char(15)), _col4 (type: varchar(50)), _col5 (type: varchar(15)), _col6 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(15)), VALUE._col3 (type: varchar(50)), VALUE._col4 (type: varchar(15)), VALUE._col5 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 9 Data size: 4764 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 1128 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 9 Data size: 4764 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 1128 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1264,25 +1274,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_string
-            Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: string), c3 (type: string), c4 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: string), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1425,25 +1435,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_char
-            Statistics: Num rows: 10 Data size: 4222 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(50)), c3 (type: char(50)), c4 (type: char(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 4222 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 4222 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(50)), _col3 (type: char(50)), _col4 (type: char(50)), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(50)), VALUE._col3 (type: char(50)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 4222 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 4222 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1586,25 +1596,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_char_trunc
-            Statistics: Num rows: 10 Data size: 3142 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(5)), c2 (type: char(5)), c3 (type: char(5)), c4 (type: char(5)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 3142 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3142 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(5)), _col2 (type: char(5)), _col3 (type: char(5)), _col4 (type: char(5)), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(5)), VALUE._col1 (type: char(5)), VALUE._col2 (type: char(5)), VALUE._col3 (type: char(5)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 3142 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3142 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1747,25 +1757,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_varchar
-            Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(50)), c2 (type: varchar(50)), c3 (type: varchar(50)), c4 (type: varchar(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(50)), _col2 (type: varchar(50)), _col3 (type: varchar(50)), _col4 (type: varchar(50)), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(50)), VALUE._col1 (type: varchar(50)), VALUE._col2 (type: varchar(50)), VALUE._col3 (type: varchar(50)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3136 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1908,25 +1918,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_varchar_trunc
-            Statistics: Num rows: 10 Data size: 3106 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(5)), c2 (type: varchar(5)), c3 (type: varchar(5)), c4 (type: varchar(5)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 3106 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3106 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(5)), _col2 (type: varchar(5)), _col3 (type: varchar(5)), _col4 (type: varchar(5)), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(5)), VALUE._col1 (type: varchar(5)), VALUE._col2 (type: varchar(5)), VALUE._col3 (type: varchar(5)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 3106 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3106 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 460 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2067,25 +2077,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_string
-            Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: string), c3 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2226,25 +2236,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_char
-            Statistics: Num rows: 10 Data size: 3838 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(50)), c3 (type: char(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 3838 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3838 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(50)), _col3 (type: char(50)), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(50)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 3838 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3838 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2385,25 +2395,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_char_trunc
-            Statistics: Num rows: 10 Data size: 3064 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(7)), c2 (type: char(7)), c3 (type: char(7)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 3064 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3064 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(7)), _col2 (type: char(7)), _col3 (type: char(7)), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(7)), VALUE._col1 (type: char(7)), VALUE._col2 (type: char(7)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 3064 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3064 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2544,25 +2554,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_varchar
-            Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(50)), c2 (type: varchar(50)), c3 (type: varchar(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(50)), _col2 (type: varchar(50)), _col3 (type: varchar(50)), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(50)), VALUE._col1 (type: varchar(50)), VALUE._col2 (type: varchar(50)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3046 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2703,25 +2713,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_varchar_trunc
-            Statistics: Num rows: 10 Data size: 3028 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(7)), c2 (type: varchar(7)), c3 (type: varchar(7)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 3028 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 3028 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(7)), _col2 (type: varchar(7)), _col3 (type: varchar(7)), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(7)), VALUE._col1 (type: varchar(7)), VALUE._col2 (type: varchar(7)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 3028 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 3028 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 880 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2872,25 +2882,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_string_group_string
-            Statistics: Num rows: 10 Data size: 4964 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1904 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(9)), c3 (type: varchar(50)), c4 (type: char(9)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 4964 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 1904 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 4964 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 1904 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(9)), _col3 (type: varchar(50)), _col4 (type: char(9)), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(9)), VALUE._col2 (type: varchar(50)), VALUE._col3 (type: char(9)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 4964 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 1904 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 4964 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1904 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3031,25 +3041,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_string_group_char
-            Statistics: Num rows: 10 Data size: 4278 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1992 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(50)), c2 (type: varchar(9)), c3 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 4278 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 1992 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 4278 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 1992 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(50)), _col2 (type: varchar(9)), _col3 (type: string), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(50)), VALUE._col1 (type: varchar(9)), VALUE._col2 (type: string), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 4278 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 1992 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 4278 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1992 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3190,25 +3200,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_string_group_varchar
-            Statistics: Num rows: 10 Data size: 4026 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1524 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(9)), c3 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 4026 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 1524 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 4026 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 1524 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(9)), _col3 (type: string), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(9)), VALUE._col2 (type: string), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 4026 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 1524 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 4026 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1524 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3363,25 +3373,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_tinyint
-            Statistics: Num rows: 10 Data size: 1838 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 456 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: smallint), c2 (type: int), c3 (type: bigint), c4 (type: decimal(38,18)), c5 (type: float), c6 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-              Statistics: Num rows: 10 Data size: 1838 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 456 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1838 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 456 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: decimal(38,18)), _col5 (type: float), _col6 (type: double), _col7 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: smallint), VALUE._col1 (type: int), VALUE._col2 (type: bigint), VALUE._col3 (type: decimal(38,18)), VALUE._col4 (type: float), VALUE._col5 (type: double), VALUE._col6 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-          Statistics: Num rows: 10 Data size: 1838 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 456 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1838 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 456 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3526,25 +3536,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_smallint
-            Statistics: Num rows: 10 Data size: 1826 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 464 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: int), c2 (type: bigint), c3 (type: decimal(38,18)), c4 (type: float), c5 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 10 Data size: 1826 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 464 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1826 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 464 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: bigint), _col3 (type: decimal(38,18)), _col4 (type: float), _col5 (type: double), _col6 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int), VALUE._col1 (type: bigint), VALUE._col2 (type: decimal(38,18)), VALUE._col3 (type: float), VALUE._col4 (type: double), VALUE._col5 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 10 Data size: 1826 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 464 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1826 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 464 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3687,25 +3697,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_int
-            Statistics: Num rows: 10 Data size: 1786 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: bigint), c2 (type: decimal(38,18)), c3 (type: float), c4 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 1786 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1786 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: bigint), _col2 (type: decimal(38,18)), _col3 (type: float), _col4 (type: double), _col5 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: bigint), VALUE._col1 (type: decimal(38,18)), VALUE._col2 (type: float), VALUE._col3 (type: double), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 1786 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1786 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3846,25 +3856,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_bigint
-            Statistics: Num rows: 10 Data size: 1770 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: decimal(38,18)), c2 (type: float), c3 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 1770 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1770 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: decimal(38,18)), _col2 (type: float), _col3 (type: double), _col4 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: decimal(38,18)), VALUE._col1 (type: float), VALUE._col2 (type: double), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 1770 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1770 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4003,25 +4013,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_decimal
-            Statistics: Num rows: 10 Data size: 1898 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1280 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: float), c2 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 10 Data size: 1898 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 1280 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1898 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 1280 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: float), _col2 (type: double), _col3 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: float), VALUE._col1 (type: double), VALUE._col2 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 10 Data size: 1898 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 1280 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1898 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1280 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4158,25 +4168,25 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_float
-            Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: double), _col2 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 994 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat


[10/21] hive git commit: HIVE-13341: Stats state is not captured correctly: differentiate load table and create table (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

Posted by px...@apache.org.
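
For context on what these golden-file changes encode: with this patch, a table created via CREATE TABLE is known to be empty, so its basic stats are recorded as accurate, while a table populated via LOAD DATA has an unknown row count, so the accuracy flag is not kept. A minimal HiveQL sketch of that distinction follows; the table name stats_demo and the input path are hypothetical, for illustration only, and the expected parameters are read off the .q.out diffs in this message.

-- Hypothetical names; not part of this patch.
CREATE TABLE stats_demo (insert_num INT, b STRING);

DESCRIBE FORMATTED stats_demo;
-- Per the updated expected output in these diffs, Table Parameters for the
-- new, empty table now include:
--   COLUMN_STATS_ACCURATE   {"BASIC_STATS":"true"}
--   numFiles 0, numRows 0, rawDataSize 0, totalSize 0

LOAD DATA LOCAL INPATH '/tmp/stats_demo.txt' INTO TABLE stats_demo;

DESCRIBE FORMATTED stats_demo;
-- After a load, numFiles and totalSize can be read from the filesystem, but
-- numRows and rawDataSize are unknown, so BASIC_STATS should no longer be
-- claimed accurate. That is the load-vs-create distinction named in the
-- HIVE-13341 title.

The smaller "Num rows" estimates in the EXPLAIN diffs (for example, 10 becoming 4) appear to follow from the planner now reading these captured row counts instead of falling back to size-based estimates.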
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/schema_evol_text_nonvec_mapwork_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/schema_evol_text_nonvec_mapwork_table.q.out b/ql/src/test/results/clientpositive/schema_evol_text_nonvec_mapwork_table.q.out
index 7a7e068..7426c5e 100644
--- a/ql/src/test/results/clientpositive/schema_evol_text_nonvec_mapwork_table.q.out
+++ b/ql/src/test/results/clientpositive/schema_evol_text_nonvec_mapwork_table.q.out
@@ -48,6 +48,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -183,24 +188,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_add_int_permute_select
-            Statistics: Num rows: 10 Data size: 111 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), a (type: int), b (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 10 Data size: 111 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 111 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 10 Data size: 111 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 111 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -313,6 +318,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -451,24 +461,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_add_int_string_permute_select
-            Statistics: Num rows: 10 Data size: 155 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), a (type: int), b (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 10 Data size: 155 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 155 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 10 Data size: 155 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 155 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -734,24 +744,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_double
-            Statistics: Num rows: 10 Data size: 550 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 318 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: double), c2 (type: double), c3 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 550 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 318 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 550 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 318 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: double), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: double), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 550 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 318 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 550 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 318 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -905,24 +915,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_date_group_string_group_timestamp
-            Statistics: Num rows: 9 Data size: 1326 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 450 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: char(50)), c3 (type: char(15)), c4 (type: varchar(50)), c5 (type: varchar(15)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 9 Data size: 1326 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 450 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 9 Data size: 1326 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 450 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: char(50)), _col3 (type: char(15)), _col4 (type: varchar(50)), _col5 (type: varchar(15)), _col6 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(15)), VALUE._col3 (type: varchar(50)), VALUE._col4 (type: varchar(15)), VALUE._col5 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 9 Data size: 1326 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 450 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 9 Data size: 1326 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 450 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1092,24 +1102,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_date_group_string_group_date
-            Statistics: Num rows: 9 Data size: 825 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 195 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: char(50)), c3 (type: char(15)), c4 (type: varchar(50)), c5 (type: varchar(15)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 9 Data size: 825 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 195 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 9 Data size: 825 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 195 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: char(50)), _col3 (type: char(15)), _col4 (type: varchar(50)), _col5 (type: varchar(15)), _col6 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(15)), VALUE._col3 (type: varchar(50)), VALUE._col4 (type: varchar(15)), VALUE._col5 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 9 Data size: 825 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 195 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 9 Data size: 825 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 195 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1259,24 +1269,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_string
-            Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: string), c3 (type: string), c4 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: string), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1419,24 +1429,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_char
-            Statistics: Num rows: 10 Data size: 1405 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(50)), c3 (type: char(50)), c4 (type: char(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 1405 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1405 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(50)), _col3 (type: char(50)), _col4 (type: char(50)), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(50)), VALUE._col3 (type: char(50)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 1405 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1405 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1579,24 +1589,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_char_trunc
-            Statistics: Num rows: 10 Data size: 325 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(5)), c2 (type: char(5)), c3 (type: char(5)), c4 (type: char(5)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 325 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 325 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(5)), _col2 (type: char(5)), _col3 (type: char(5)), _col4 (type: char(5)), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(5)), VALUE._col1 (type: char(5)), VALUE._col2 (type: char(5)), VALUE._col3 (type: char(5)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 325 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 325 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1739,24 +1749,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_varchar
-            Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(50)), c2 (type: varchar(50)), c3 (type: varchar(50)), c4 (type: varchar(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(50)), _col2 (type: varchar(50)), _col3 (type: varchar(50)), _col4 (type: varchar(50)), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(50)), VALUE._col1 (type: varchar(50)), VALUE._col2 (type: varchar(50)), VALUE._col3 (type: varchar(50)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 330 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1899,24 +1909,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_multi_ints_varchar_trunc
-            Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(5)), c2 (type: varchar(5)), c3 (type: varchar(5)), c4 (type: varchar(5)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(5)), _col2 (type: varchar(5)), _col3 (type: varchar(5)), _col4 (type: varchar(5)), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(5)), VALUE._col1 (type: varchar(5)), VALUE._col2 (type: varchar(5)), VALUE._col3 (type: varchar(5)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 304 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 150 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2057,24 +2067,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_string
-            Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: string), c2 (type: string), c3 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2215,24 +2225,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_char
-            Statistics: Num rows: 10 Data size: 1169 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(50)), c3 (type: char(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 1169 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 1169 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(50)), _col3 (type: char(50)), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(50)), VALUE._col2 (type: char(50)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 1169 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 1169 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2373,24 +2383,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_char_trunc
-            Statistics: Num rows: 10 Data size: 390 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(7)), c2 (type: char(7)), c3 (type: char(7)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 390 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 390 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(7)), _col2 (type: char(7)), _col3 (type: char(7)), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(7)), VALUE._col1 (type: char(7)), VALUE._col2 (type: char(7)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 390 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 390 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 215 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2531,24 +2541,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_varchar
-            Statistics: Num rows: 10 Data size: 368 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 203 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(50)), c2 (type: varchar(50)), c3 (type: varchar(50)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 368 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 203 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 368 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 203 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(50)), _col2 (type: varchar(50)), _col3 (type: varchar(50)), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(50)), VALUE._col1 (type: varchar(50)), VALUE._col2 (type: varchar(50)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 368 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 203 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 368 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 203 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2689,24 +2699,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_numeric_group_string_group_floating_varchar_trunc
-            Statistics: Num rows: 10 Data size: 362 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(7)), c2 (type: varchar(7)), c3 (type: varchar(7)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 362 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 362 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(7)), _col2 (type: varchar(7)), _col3 (type: varchar(7)), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(7)), VALUE._col1 (type: varchar(7)), VALUE._col2 (type: varchar(7)), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 362 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 362 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 220 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2857,24 +2867,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_string_group_string
-            Statistics: Num rows: 10 Data size: 791 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 236 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(9)), c3 (type: varchar(50)), c4 (type: char(9)), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 791 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 236 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 791 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 236 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(9)), _col3 (type: varchar(50)), _col4 (type: char(9)), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(9)), VALUE._col2 (type: varchar(50)), VALUE._col3 (type: char(9)), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 791 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 236 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 791 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 236 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3015,24 +3025,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_string_group_char
-            Statistics: Num rows: 10 Data size: 937 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 652 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: varchar(50)), c2 (type: varchar(9)), c3 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 937 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 652 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 937 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 652 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: varchar(50)), _col2 (type: varchar(9)), _col3 (type: string), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(50)), VALUE._col1 (type: varchar(9)), VALUE._col2 (type: string), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 937 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 652 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 937 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 652 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3173,24 +3183,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_string_group_string_group_varchar
-            Statistics: Num rows: 10 Data size: 682 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 187 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: char(50)), c2 (type: char(9)), c3 (type: string), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 682 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 187 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 682 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 187 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: char(50)), _col2 (type: char(9)), _col3 (type: string), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(50)), VALUE._col1 (type: char(9)), VALUE._col2 (type: string), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 682 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 187 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 682 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 187 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3345,24 +3355,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_tinyint
-            Statistics: Num rows: 10 Data size: 446 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 118 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: smallint), c2 (type: int), c3 (type: bigint), c4 (type: decimal(38,18)), c5 (type: float), c6 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-              Statistics: Num rows: 10 Data size: 446 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 118 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 446 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 118 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: decimal(38,18)), _col5 (type: float), _col6 (type: double), _col7 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: smallint), VALUE._col1 (type: int), VALUE._col2 (type: bigint), VALUE._col3 (type: decimal(38,18)), VALUE._col4 (type: float), VALUE._col5 (type: double), VALUE._col6 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-          Statistics: Num rows: 10 Data size: 446 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 118 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 446 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 118 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3507,24 +3517,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_smallint
-            Statistics: Num rows: 10 Data size: 445 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 130 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: int), c2 (type: bigint), c3 (type: decimal(38,18)), c4 (type: float), c5 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 10 Data size: 445 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 130 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 445 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 130 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: bigint), _col3 (type: decimal(38,18)), _col4 (type: float), _col5 (type: double), _col6 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int), VALUE._col1 (type: bigint), VALUE._col2 (type: decimal(38,18)), VALUE._col3 (type: float), VALUE._col4 (type: double), VALUE._col5 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 10 Data size: 445 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 130 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 445 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 130 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3667,24 +3677,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_int
-            Statistics: Num rows: 10 Data size: 429 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 132 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: bigint), c2 (type: decimal(38,18)), c3 (type: float), c4 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 10 Data size: 429 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 132 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 429 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 132 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: bigint), _col2 (type: decimal(38,18)), _col3 (type: float), _col4 (type: double), _col5 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: bigint), VALUE._col1 (type: decimal(38,18)), VALUE._col2 (type: float), VALUE._col3 (type: double), VALUE._col4 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 10 Data size: 429 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 132 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 429 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 132 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3825,24 +3835,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_bigint
-            Statistics: Num rows: 10 Data size: 411 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 127 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: decimal(38,18)), c2 (type: float), c3 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 10 Data size: 411 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 127 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 411 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 127 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: decimal(38,18)), _col2 (type: float), _col3 (type: double), _col4 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: decimal(38,18)), VALUE._col1 (type: float), VALUE._col2 (type: double), VALUE._col3 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 411 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 127 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 411 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 127 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3981,24 +3991,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_decimal
-            Statistics: Num rows: 10 Data size: 335 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 238 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: float), c2 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 10 Data size: 335 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 238 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 335 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 238 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: float), _col2 (type: double), _col3 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: float), VALUE._col1 (type: double), VALUE._col2 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 10 Data size: 335 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 238 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 335 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 238 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4135,24 +4145,24 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: table_change_lower_to_higher_numeric_group_float
-            Statistics: Num rows: 10 Data size: 148 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 78 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: insert_num (type: int), c1 (type: double), b (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 10 Data size: 148 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 78 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: int)
                 sort order: +
-                Statistics: Num rows: 10 Data size: 148 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 78 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: double), _col2 (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 10 Data size: 148 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 78 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 10 Data size: 148 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 78 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
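
A plausible reading of the plan-statistics deltas above: once basic stats are captured accurately at write time, EXPLAIN reports the recorded row count (here 4 rows) instead of estimating roughly 10 rows from raw data size. A minimal HiveQL sketch of how to observe this, using a hypothetical table name that is not part of this patch:

-- hive.stats.autogather (true by default) records numRows/rawDataSize
-- in the metastore as part of the INSERT.
SET hive.stats.autogather=true;
CREATE TABLE stats_est_demo (insert_num INT, b STRING);
INSERT INTO TABLE stats_est_demo VALUES (1, 'a'), (2, 'b'), (3, 'c'), (4, 'd');
-- EXPLAIN should now show the exact recorded count, e.g.
-- "Statistics: Num rows: 4 ... Basic stats: COMPLETE Column stats: NONE".
EXPLAIN SELECT * FROM stats_est_demo ORDER BY insert_num;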


[06/21] hive git commit: HIVE-13341: Stats state is not captured correctly: differentiate load table and create table (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

Posted by px...@apache.org.
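
For context on the golden-file updates that follow: the patch asserts accurate basic stats only when the metastore can actually know them. A minimal HiveQL sketch of the two paths being differentiated, with a hypothetical table name and file path used purely for illustration:

-- CREATE TABLE: the new table is known to be empty, so DESCRIBE FORMATTED
-- shows COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} with numRows 0.
CREATE TABLE stats_demo (key INT, value STRING);
DESCRIBE FORMATTED stats_demo;
-- LOAD DATA: files are moved in without being scanned, so the accurate
-- flag is not claimed; numFiles/totalSize update, while numRows/rawDataSize
-- remain 0 as placeholders (visible in the test1..test4 diffs below).
LOAD DATA LOCAL INPATH '/tmp/kv1.txt' INTO TABLE stats_demo;
DESCRIBE FORMATTED stats_demo;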
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative3.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative3.q.out
index 845e450..aafd18a 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative3.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative3.q.out
@@ -198,6 +198,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.test1
                     numFiles 3
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct test1 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -217,6 +219,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.test1
                       numFiles 3
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct test1 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -305,6 +309,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.test1
                     numFiles 3
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct test1 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -324,6 +330,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.test1
                       numFiles 3
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct test1 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -395,6 +403,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.test2
                     numFiles 3
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct test2 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -414,6 +424,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.test2
                       numFiles 3
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct test2 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -502,6 +514,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.test2
                     numFiles 3
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct test2 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -521,6 +535,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.test2
                       numFiles 3
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct test2 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -589,6 +605,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.test1
                     numFiles 3
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct test1 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -608,6 +626,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.test1
                       numFiles 3
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct test1 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -690,6 +710,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.test1
                     numFiles 3
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct test1 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -709,6 +731,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.test1
                       numFiles 3
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct test1 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -775,6 +799,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.test2
                     numFiles 3
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct test2 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -794,6 +820,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.test2
                       numFiles 3
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct test2 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -876,6 +904,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.test1
                     numFiles 3
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct test1 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -895,6 +925,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.test1
                       numFiles 3
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct test1 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -961,6 +993,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.test3
                     numFiles 3
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct test3 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -980,6 +1014,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.test3
                       numFiles 3
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct test3 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1062,6 +1098,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.test1
                     numFiles 3
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct test1 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1081,6 +1119,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.test1
                       numFiles 3
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct test1 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1147,6 +1187,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.test4
                     numFiles 3
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct test4 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1166,6 +1208,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.test4
                       numFiles 3
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct test4 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1248,6 +1292,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.test1
                     numFiles 3
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct test1 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1267,6 +1313,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.test1
                       numFiles 3
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct test1 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1333,6 +1381,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.test3
                     numFiles 3
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct test3 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1352,6 +1402,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.test3
                       numFiles 3
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct test3 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1434,6 +1486,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.test2
                     numFiles 3
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct test2 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1453,6 +1507,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.test2
                       numFiles 3
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct test2 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1519,6 +1575,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.test4
                     numFiles 3
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct test4 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1538,6 +1596,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.test4
                       numFiles 3
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct test4 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1620,6 +1680,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.test2
                     numFiles 3
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct test2 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1639,6 +1701,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.test2
                       numFiles 3
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct test2 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1705,6 +1769,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.test4
                     numFiles 3
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct test4 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1724,6 +1790,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.test4
                       numFiles 3
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct test4 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1806,6 +1874,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.test3
                     numFiles 3
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct test3 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1825,6 +1895,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.test3
                       numFiles 3
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct test3 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out b/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out
index e21b1b1..c8503cd 100644
--- a/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out
+++ b/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out
@@ -111,6 +111,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         bucket_count 2
                         bucket_field_name key
                         columns key,value
@@ -118,9 +119,13 @@ STAGE PLANS:
                         columns.types int:string
 #### A masked pattern was here ####
                         name default.bucket2_1
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct bucket2_1 { i32 key, string value}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.bucket2_1
@@ -137,6 +142,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count 2
                 bucket_field_name key
                 columns key,value
@@ -144,9 +150,13 @@ STAGE PLANS:
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.bucket2_1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket2_1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucket2_1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/groupby_map_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby_map_ppr.q.out b/ql/src/test/results/clientpositive/spark/groupby_map_ppr.q.out
index 7660305..f97f63e 100644
--- a/ql/src/test/results/clientpositive/spark/groupby_map_ppr.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby_map_ppr.q.out
@@ -183,15 +183,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,c1,c2
                           columns.comments 
                           columns.types string:int:string
 #### A masked pattern was here ####
                           name default.dest1
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct dest1 { string key, i32 c1, string c2}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.dest1
@@ -208,15 +213,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,c1,c2
                 columns.comments 
                 columns.types string:int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { string key, i32 c1, string c2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/groupby_map_ppr_multi_distinct.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby_map_ppr_multi_distinct.q.out b/ql/src/test/results/clientpositive/spark/groupby_map_ppr_multi_distinct.q.out
index 0608adc..c833657 100644
--- a/ql/src/test/results/clientpositive/spark/groupby_map_ppr_multi_distinct.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby_map_ppr_multi_distinct.q.out
@@ -183,15 +183,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,c1,c2,c3,c4
                           columns.comments 
                           columns.types string:int:string:int:int
 #### A masked pattern was here ####
                           name default.dest1
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.dest1
@@ -208,15 +213,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,c1,c2,c3,c4
                 columns.comments 
                 columns.types string:int:string:int:int
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/groupby_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby_ppr.q.out b/ql/src/test/results/clientpositive/spark/groupby_ppr.q.out
index 6abeb5d..a2c2ced 100644
--- a/ql/src/test/results/clientpositive/spark/groupby_ppr.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby_ppr.q.out
@@ -176,15 +176,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,c1,c2
                           columns.comments 
                           columns.types string:int:string
 #### A masked pattern was here ####
                           name default.dest1
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct dest1 { string key, i32 c1, string c2}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.dest1
@@ -201,15 +206,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,c1,c2
                 columns.comments 
                 columns.types string:int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { string key, i32 c1, string c2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/groupby_ppr_multi_distinct.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby_ppr_multi_distinct.q.out b/ql/src/test/results/clientpositive/spark/groupby_ppr_multi_distinct.q.out
index 66d6cb6..531854b 100644
--- a/ql/src/test/results/clientpositive/spark/groupby_ppr_multi_distinct.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby_ppr_multi_distinct.q.out
@@ -176,15 +176,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,c1,c2,c3,c4
                           columns.comments 
                           columns.types string:int:string:int:int
 #### A masked pattern was here ####
                           name default.dest1
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.dest1
@@ -201,15 +206,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,c1,c2,c3,c4
                 columns.comments 
                 columns.types string:int:string:int:int
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/groupby_sort_1_23.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby_sort_1_23.q.out b/ql/src/test/results/clientpositive/spark/groupby_sort_1_23.q.out
index 1b4822e..a21ea9c 100644
--- a/ql/src/test/results/clientpositive/spark/groupby_sort_1_23.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby_sort_1_23.q.out
@@ -95,15 +95,20 @@ STAGE PLANS:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                               properties:
+                                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                 bucket_count -1
                                 columns key,cnt
                                 columns.comments 
                                 columns.types int:int
 #### A masked pattern was here ####
                                 name default.outputtbl1
+                                numFiles 0
+                                numRows 0
+                                rawDataSize 0
                                 serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
                                 serialization.format 1
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                totalSize 0
 #### A masked pattern was here ####
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: default.outputtbl1
@@ -173,15 +178,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,cnt
                 columns.comments 
                 columns.types int:int
 #### A masked pattern was here ####
                 name default.outputtbl1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl1
@@ -347,15 +357,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key1,key2,cnt
                           columns.comments 
                           columns.types int:string:int
 #### A masked pattern was here ####
                           name default.outputtbl2
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct outputtbl2 { i32 key1, string key2, i32 cnt}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.outputtbl2
@@ -372,15 +387,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,cnt
                 columns.comments 
                 columns.types int:string:int
 #### A masked pattern was here ####
                 name default.outputtbl2
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl2 { i32 key1, string key2, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl2
@@ -839,15 +859,20 @@ STAGE PLANS:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                               properties:
+                                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                 bucket_count -1
                                 columns key1,key2,cnt
                                 columns.comments 
                                 columns.types int:int:int
 #### A masked pattern was here ####
                                 name default.outputtbl3
+                                numFiles 0
+                                numRows 0
+                                rawDataSize 0
                                 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
                                 serialization.format 1
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                totalSize 0
 #### A masked pattern was here ####
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: default.outputtbl3
@@ -917,15 +942,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,cnt
                 columns.comments 
                 columns.types int:int:int
 #### A masked pattern was here ####
                 name default.outputtbl3
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl3
@@ -1092,15 +1122,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key1,key2,key3,cnt
                           columns.comments 
                           columns.types int:int:string:int
 #### A masked pattern was here ####
                           name default.outputtbl4
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.outputtbl4
@@ -1117,15 +1152,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,key3,cnt
                 columns.comments 
                 columns.types int:int:string:int
 #### A masked pattern was here ####
                 name default.outputtbl4
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl4
@@ -3251,15 +3291,20 @@ STAGE PLANS:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                               properties:
+                                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                 bucket_count -1
                                 columns key1,key2,key3,key4,cnt
                                 columns.comments 
                                 columns.types int:int:string:int:int
 #### A masked pattern was here ####
                                 name default.outputtbl5
+                                numFiles 0
+                                numRows 0
+                                rawDataSize 0
                                 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
                                 serialization.format 1
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                totalSize 0
 #### A masked pattern was here ####
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: default.outputtbl5
@@ -3329,15 +3374,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,key3,key4,cnt
                 columns.comments 
                 columns.types int:int:string:int:int
 #### A masked pattern was here ####
                 name default.outputtbl5
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl5

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/groupby_sort_skew_1_23.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby_sort_skew_1_23.q.out b/ql/src/test/results/clientpositive/spark/groupby_sort_skew_1_23.q.out
index 0956771..d0f5952 100644
--- a/ql/src/test/results/clientpositive/spark/groupby_sort_skew_1_23.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby_sort_skew_1_23.q.out
@@ -95,15 +95,20 @@ STAGE PLANS:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                               properties:
+                                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                 bucket_count -1
                                 columns key,cnt
                                 columns.comments 
                                 columns.types int:int
 #### A masked pattern was here ####
                                 name default.outputtbl1
+                                numFiles 0
+                                numRows 0
+                                rawDataSize 0
                                 serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
                                 serialization.format 1
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                totalSize 0
 #### A masked pattern was here ####
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: default.outputtbl1
@@ -173,15 +178,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,cnt
                 columns.comments 
                 columns.types int:int
 #### A masked pattern was here ####
                 name default.outputtbl1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl1
@@ -366,15 +376,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key1,key2,cnt
                           columns.comments 
                           columns.types int:string:int
 #### A masked pattern was here ####
                           name default.outputtbl2
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct outputtbl2 { i32 key1, string key2, i32 cnt}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.outputtbl2
@@ -391,15 +406,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,cnt
                 columns.comments 
                 columns.types int:string:int
 #### A masked pattern was here ####
                 name default.outputtbl2
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl2 { i32 key1, string key2, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl2
@@ -858,15 +878,20 @@ STAGE PLANS:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                               properties:
+                                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                 bucket_count -1
                                 columns key1,key2,cnt
                                 columns.comments 
                                 columns.types int:int:int
 #### A masked pattern was here ####
                                 name default.outputtbl3
+                                numFiles 0
+                                numRows 0
+                                rawDataSize 0
                                 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
                                 serialization.format 1
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                totalSize 0
 #### A masked pattern was here ####
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: default.outputtbl3
@@ -936,15 +961,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,cnt
                 columns.comments 
                 columns.types int:int:int
 #### A masked pattern was here ####
                 name default.outputtbl3
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl3
@@ -1130,15 +1160,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key1,key2,key3,cnt
                           columns.comments 
                           columns.types int:int:string:int
 #### A masked pattern was here ####
                           name default.outputtbl4
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.outputtbl4
@@ -1155,15 +1190,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,key3,cnt
                 columns.comments 
                 columns.types int:int:string:int
 #### A masked pattern was here ####
                 name default.outputtbl4
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl4
@@ -3384,15 +3424,20 @@ STAGE PLANS:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                               properties:
+                                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                 bucket_count -1
                                 columns key1,key2,key3,key4,cnt
                                 columns.comments 
                                 columns.types int:int:string:int:int
 #### A masked pattern was here ####
                                 name default.outputtbl5
+                                numFiles 0
+                                numRows 0
+                                rawDataSize 0
                                 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
                                 serialization.format 1
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                totalSize 0
 #### A masked pattern was here ####
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: default.outputtbl5
@@ -3462,15 +3507,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,key3,key4,cnt
                 columns.comments 
                 columns.types int:int:string:int:int
 #### A masked pattern was here ####
                 name default.outputtbl5
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl5

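The property blocks updated above are fragments of EXPLAIN EXTENDED plans. A minimal sketch of how such a block is surfaced in a Hive session (the table name stats_demo and the use of the standard test table src are illustrative assumptions, not part of this commit):

-- Create an empty target; under HIVE-13341 its basic stats start out as
-- accurate zeros instead of being absent.
CREATE TABLE stats_demo (key1 INT, cnt INT);

-- In the extended plan, the table sink's "properties:" section now lists
-- COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} together with numFiles,
-- numRows, rawDataSize and totalSize, all 0 while the target is empty.
EXPLAIN EXTENDED
INSERT OVERWRITE TABLE stats_demo
SELECT key, COUNT(1) FROM src GROUP BY key;
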
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/input_part2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/input_part2.q.out b/ql/src/test/results/clientpositive/spark/input_part2.q.out
index 73a3679..4799a7f 100644
--- a/ql/src/test/results/clientpositive/spark/input_part2.q.out
+++ b/ql/src/test/results/clientpositive/spark/input_part2.q.out
@@ -65,15 +65,20 @@ STAGE PLANS:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                             properties:
+                              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                               bucket_count -1
                               columns key,value,hr,ds
                               columns.comments 
                               columns.types int:string:string:string
 #### A masked pattern was here ####
                               name default.dest1
+                              numFiles 0
+                              numRows 0
+                              rawDataSize 0
                               serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
                               serialization.format 1
                               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              totalSize 0
 #### A masked pattern was here ####
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             name: default.dest1
@@ -99,15 +104,20 @@ STAGE PLANS:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                             properties:
+                              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                               bucket_count -1
                               columns key,value,hr,ds
                               columns.comments 
                               columns.types int:string:string:string
 #### A masked pattern was here ####
                               name default.dest2
+                              numFiles 0
+                              numRows 0
+                              rawDataSize 0
                               serialization.ddl struct dest2 { i32 key, string value, string hr, string ds}
                               serialization.format 1
                               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              totalSize 0
 #### A masked pattern was here ####
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             name: default.dest2
@@ -222,15 +232,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,hr,ds
                 columns.comments 
                 columns.types int:string:string:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -248,15 +263,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,hr,ds
                 columns.comments 
                 columns.types int:string:string:string
 #### A masked pattern was here ####
                 name default.dest2
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest2 { i32 key, string value, string hr, string ds}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest2

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/join17.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join17.q.out b/ql/src/test/results/clientpositive/spark/join17.q.out
index 0d92441..3acf7f9 100644
--- a/ql/src/test/results/clientpositive/spark/join17.q.out
+++ b/ql/src/test/results/clientpositive/spark/join17.q.out
@@ -200,15 +200,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key1,value1,key2,value2
                           columns.comments 
                           columns.types int:string:int:string
 #### A masked pattern was here ####
                           name default.dest1
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct dest1 { i32 key1, string value1, i32 key2, string value2}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.dest1
@@ -225,15 +230,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,value1,key2,value2
                 columns.comments 
                 columns.types int:string:int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key1, string value1, i32 key2, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/join26.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join26.q.out b/ql/src/test/results/clientpositive/spark/join26.q.out
index c700b87..4967ab6 100644
--- a/ql/src/test/results/clientpositive/spark/join26.q.out
+++ b/ql/src/test/results/clientpositive/spark/join26.q.out
@@ -211,15 +211,20 @@ STAGE PLANS:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                               properties:
+                                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                 bucket_count -1
                                 columns key,value,val2
                                 columns.comments 
                                 columns.types string:string:string
 #### A masked pattern was here ####
                                 name default.dest_j1
+                                numFiles 0
+                                numRows 0
+                                rawDataSize 0
                                 serialization.ddl struct dest_j1 { string key, string value, string val2}
                                 serialization.format 1
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                totalSize 0
 #### A masked pattern was here ####
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: default.dest_j1
@@ -289,15 +294,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.dest_j1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j1 { string key, string value, string val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/join32.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join32.q.out b/ql/src/test/results/clientpositive/spark/join32.q.out
index fc3dcc2..be29cd5 100644
--- a/ql/src/test/results/clientpositive/spark/join32.q.out
+++ b/ql/src/test/results/clientpositive/spark/join32.q.out
@@ -233,15 +233,20 @@ STAGE PLANS:
                                   input format: org.apache.hadoop.mapred.TextInputFormat
                                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                                   properties:
+                                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                     bucket_count -1
                                     columns key,value,val2
                                     columns.comments 
                                     columns.types string:string:string
 #### A masked pattern was here ####
                                     name default.dest_j1
+                                    numFiles 0
+                                    numRows 0
+                                    rawDataSize 0
                                     serialization.ddl struct dest_j1 { string key, string value, string val2}
                                     serialization.format 1
                                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                    totalSize 0
 #### A masked pattern was here ####
                                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                                   name: default.dest_j1
@@ -311,15 +316,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.dest_j1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j1 { string key, string value, string val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out b/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out
index 4d5110f..8a99a56 100644
--- a/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out
+++ b/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out
@@ -241,15 +241,20 @@ STAGE PLANS:
                                   input format: org.apache.hadoop.mapred.TextInputFormat
                                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                                   properties:
+                                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                     bucket_count -1
                                     columns key,value,val2
                                     columns.comments 
                                     columns.types string:string:string
 #### A masked pattern was here ####
                                     name default.dest_j1
+                                    numFiles 0
+                                    numRows 0
+                                    rawDataSize 0
                                     serialization.ddl struct dest_j1 { string key, string value, string val2}
                                     serialization.format 1
                                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                    totalSize 0
 #### A masked pattern was here ####
                                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                                   name: default.dest_j1
@@ -319,15 +324,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.dest_j1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j1 { string key, string value, string val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j1
@@ -1202,15 +1212,20 @@ STAGE PLANS:
                                     input format: org.apache.hadoop.mapred.TextInputFormat
                                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                                     properties:
+                                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                       bucket_count -1
                                       columns key,value,val2
                                       columns.comments 
                                       columns.types string:string:string
 #### A masked pattern was here ####
                                       name default.dest_j2
+                                      numFiles 0
+                                      numRows 0
+                                      rawDataSize 0
                                       serialization.ddl struct dest_j2 { string key, string value, string val2}
                                       serialization.format 1
                                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                      totalSize 0
 #### A masked pattern was here ####
                                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                                     name: default.dest_j2
@@ -1278,15 +1293,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.dest_j2
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j2 { string key, string value, string val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j2

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/join33.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join33.q.out b/ql/src/test/results/clientpositive/spark/join33.q.out
index fc3dcc2..be29cd5 100644
--- a/ql/src/test/results/clientpositive/spark/join33.q.out
+++ b/ql/src/test/results/clientpositive/spark/join33.q.out
@@ -233,15 +233,20 @@ STAGE PLANS:
                                   input format: org.apache.hadoop.mapred.TextInputFormat
                                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                                   properties:
+                                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                     bucket_count -1
                                     columns key,value,val2
                                     columns.comments 
                                     columns.types string:string:string
 #### A masked pattern was here ####
                                     name default.dest_j1
+                                    numFiles 0
+                                    numRows 0
+                                    rawDataSize 0
                                     serialization.ddl struct dest_j1 { string key, string value, string val2}
                                     serialization.format 1
                                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                    totalSize 0
 #### A masked pattern was here ####
                                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                                   name: default.dest_j1
@@ -311,15 +316,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.dest_j1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j1 { string key, string value, string val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/join34.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join34.q.out b/ql/src/test/results/clientpositive/spark/join34.q.out
index 8d0b649..235d36a 100644
--- a/ql/src/test/results/clientpositive/spark/join34.q.out
+++ b/ql/src/test/results/clientpositive/spark/join34.q.out
@@ -288,15 +288,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,value,val2
                           columns.comments 
                           columns.types string:string:string
 #### A masked pattern was here ####
                           name default.dest_j1
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct dest_j1 { string key, string value, string val2}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.dest_j1
@@ -313,15 +318,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.dest_j1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j1 { string key, string value, string val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/join35.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join35.q.out b/ql/src/test/results/clientpositive/spark/join35.q.out
index 7f9aa24..7b873c6 100644
--- a/ql/src/test/results/clientpositive/spark/join35.q.out
+++ b/ql/src/test/results/clientpositive/spark/join35.q.out
@@ -320,15 +320,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,value,val2
                           columns.comments 
                           columns.types string:string:int
 #### A masked pattern was here ####
                           name default.dest_j1
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct dest_j1 { string key, string value, i32 val2}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.dest_j1
@@ -363,15 +368,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:int
 #### A masked pattern was here ####
                 name default.dest_j1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j1 { string key, string value, i32 val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/join9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join9.q.out b/ql/src/test/results/clientpositive/spark/join9.q.out
index 714302e..4119855 100644
--- a/ql/src/test/results/clientpositive/spark/join9.q.out
+++ b/ql/src/test/results/clientpositive/spark/join9.q.out
@@ -201,15 +201,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,value
                           columns.comments 
                           columns.types int:string
 #### A masked pattern was here ####
                           name default.dest1
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct dest1 { i32 key, string value}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.dest1
@@ -226,15 +231,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/join_map_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join_map_ppr.q.out b/ql/src/test/results/clientpositive/spark/join_map_ppr.q.out
index 9341862..4d6d39d 100644
--- a/ql/src/test/results/clientpositive/spark/join_map_ppr.q.out
+++ b/ql/src/test/results/clientpositive/spark/join_map_ppr.q.out
@@ -213,15 +213,20 @@ STAGE PLANS:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                               properties:
+                                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                 bucket_count -1
                                 columns key,value,val2
                                 columns.comments 
                                 columns.types string:string:string
 #### A masked pattern was here ####
                                 name default.dest_j1
+                                numFiles 0
+                                numRows 0
+                                rawDataSize 0
                                 serialization.ddl struct dest_j1 { string key, string value, string val2}
                                 serialization.format 1
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                totalSize 0
 #### A masked pattern was here ####
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: default.dest_j1
@@ -291,15 +296,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.dest_j1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j1 { string key, string value, string val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/mapjoin_memcheck.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/mapjoin_memcheck.q.out b/ql/src/test/results/clientpositive/spark/mapjoin_memcheck.q.out
index 2f41847..8edcdcb 100644
--- a/ql/src/test/results/clientpositive/spark/mapjoin_memcheck.q.out
+++ b/ql/src/test/results/clientpositive/spark/mapjoin_memcheck.q.out
@@ -42,14 +42,14 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: src1
-                  Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: key (type: string), value (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
                       Spark HashTable Sink Operator
                         keys:
                           0 _col0 (type: string)
@@ -65,14 +65,14 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: src1
-                  Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: key (type: string), value (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
                       Map Join Operator
                         condition map:
                              Inner Join 0 to 1
@@ -82,10 +82,10 @@ STAGE PLANS:
                         outputColumnNames: _col0, _col1, _col2, _col3
                         input vertices:
                           1 Map 2
-                        Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE
                         File Output Operator
                           compressed: false
-                          Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE
                           table:
                               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat


[20/21] hive git commit: HIVE-13341: Stats state is not captured correctly: differentiate load table and create table (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

Posted by px...@apache.org.
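
The golden-file changes in this part follow the distinction the commit title draws between creating and loading a table. A hypothetical HiveQL session (table name t1 and the input path are assumptions; the property output mirrors the diffs below):

-- CREATE TABLE writes no data, so zero-valued basic stats are trivially
-- accurate and are now recorded as such:
CREATE TABLE t1 (key STRING, value STRING);
DESCRIBE FORMATTED t1;
--   COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
--   numFiles 0, numRows 0, rawDataSize 0, totalSize 0

-- LOAD DATA moves files in without scanning them, so accuracy can no
-- longer be claimed; numRows and rawDataSize remain 0 but unverified
-- until ANALYZE TABLE t1 COMPUTE STATISTICS is run:
LOAD DATA LOCAL INPATH '/tmp/kv1.txt' INTO TABLE t1;
DESCRIBE FORMATTED t1;
--   COLUMN_STATS_ACCURATE no longer present among the table parameters
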
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out
index 78cd03d..1d59a0d 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out
@@ -146,8 +146,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -191,8 +193,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -314,8 +318,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -416,8 +422,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -461,8 +469,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -505,8 +515,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -587,8 +599,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -631,8 +645,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -733,8 +749,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -778,8 +796,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -822,8 +842,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -933,8 +955,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -978,8 +1002,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out
index 7ddf6b3..026fde7 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out
@@ -146,8 +146,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -289,8 +291,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -409,8 +413,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -452,8 +458,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -554,8 +562,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -598,8 +608,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -642,8 +654,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -723,8 +737,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -825,8 +841,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -869,8 +887,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -913,8 +933,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1025,8 +1047,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out
index 5b68da2..8037ff5 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out
@@ -162,8 +162,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -305,8 +307,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -425,8 +429,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -468,8 +474,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -570,8 +578,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -614,8 +624,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -658,8 +670,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -739,8 +753,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -841,8 +857,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -885,8 +903,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -929,8 +949,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1041,8 +1063,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out
index 07bcb89..e2deba4 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out
@@ -132,6 +132,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -151,6 +153,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.bucket_big
                 numFiles 2
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket_big { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -266,6 +270,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -285,6 +291,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.bucket_big
                 numFiles 2
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket_big { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -435,6 +443,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -454,6 +464,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.bucket_big
                 numFiles 2
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket_big { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -491,6 +503,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.bucket_small
                 numFiles 4
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket_small { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -607,6 +621,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -626,6 +642,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.bucket_big
                 numFiles 2
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket_big { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -663,6 +681,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.bucket_small
                 numFiles 4
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket_small { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -752,6 +772,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -771,6 +793,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.bucket_big
                 numFiles 2
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket_big { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out
index d12c1ca..d7a053f 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out
@@ -179,8 +179,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -224,8 +226,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -370,8 +374,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -415,8 +421,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -538,8 +546,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -581,8 +591,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -683,8 +695,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -728,8 +742,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -772,8 +788,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -816,8 +834,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -898,8 +918,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -942,8 +964,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1044,8 +1068,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1089,8 +1115,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1133,8 +1161,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1177,8 +1207,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1289,8 +1321,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1334,8 +1368,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out
index 23a3685..6db35a6 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out
@@ -179,8 +179,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -224,8 +226,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -370,8 +374,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -415,8 +421,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -540,8 +548,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -583,8 +593,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -685,8 +697,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -730,8 +744,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -774,8 +790,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -818,8 +836,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -900,8 +920,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -944,8 +966,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1046,8 +1070,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1091,8 +1117,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1135,8 +1163,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1179,8 +1209,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1291,8 +1323,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1336,8 +1370,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/binary_output_format.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/binary_output_format.q.out b/ql/src/test/results/clientpositive/binary_output_format.q.out
index 608d475..51328e2 100644
--- a/ql/src/test/results/clientpositive/binary_output_format.q.out
+++ b/ql/src/test/results/clientpositive/binary_output_format.q.out
@@ -99,16 +99,21 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         bucket_count -1
                         columns mydata
                         columns.comments 
                         columns.types string
 #### A masked pattern was here ####
                         name default.dest1
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct dest1 { string mydata}
                         serialization.format 1
                         serialization.last.column.takes.rest true
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.dest1
@@ -183,16 +188,21 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns mydata
                 columns.comments 
                 columns.types string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { string mydata}
                 serialization.format 1
                 serialization.last.column.takes.rest true
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -215,16 +225,21 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns mydata
                     columns.comments 
                     columns.types string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { string mydata}
                     serialization.format 1
                     serialization.last.column.takes.rest true
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -240,32 +255,42 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns mydata
               columns.comments 
               columns.types string
 #### A masked pattern was here ####
               name default.dest1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest1 { string mydata}
               serialization.format 1
               serialization.last.column.takes.rest true
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns mydata
                 columns.comments 
                 columns.types string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { string mydata}
                 serialization.format 1
                 serialization.last.column.takes.rest true
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -287,16 +312,21 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns mydata
                     columns.comments 
                     columns.types string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { string mydata}
                     serialization.format 1
                     serialization.last.column.takes.rest true
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -312,32 +342,42 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns mydata
               columns.comments 
               columns.types string
 #### A masked pattern was here ####
               name default.dest1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest1 { string mydata}
               serialization.format 1
               serialization.last.column.takes.rest true
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns mydata
                 columns.comments 
                 columns.types string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { string mydata}
                 serialization.format 1
                 serialization.last.column.takes.rest true
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucket1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket1.q.out b/ql/src/test/results/clientpositive/bucket1.q.out
index 96b8d4e..78fb530 100644
--- a/ql/src/test/results/clientpositive/bucket1.q.out
+++ b/ql/src/test/results/clientpositive/bucket1.q.out
@@ -109,6 +109,7 @@ STAGE PLANS:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 properties:
+                  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                   bucket_count 100
                   bucket_field_name key
                   columns key,value
@@ -116,9 +117,13 @@ STAGE PLANS:
                   columns.types int:string
 #### A masked pattern was here ####
                   name default.bucket1_1
+                  numFiles 0
+                  numRows 0
+                  rawDataSize 0
                   serialization.ddl struct bucket1_1 { i32 key, string value}
                   serialization.format 1
                   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  totalSize 0
 #### A masked pattern was here ####
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: default.bucket1_1
@@ -135,6 +140,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count 100
                 bucket_field_name key
                 columns key,value
@@ -142,9 +148,13 @@ STAGE PLANS:
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.bucket1_1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket1_1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucket1_1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucket2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket2.q.out b/ql/src/test/results/clientpositive/bucket2.q.out
index b5572fd..297984e 100644
--- a/ql/src/test/results/clientpositive/bucket2.q.out
+++ b/ql/src/test/results/clientpositive/bucket2.q.out
@@ -109,6 +109,7 @@ STAGE PLANS:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 properties:
+                  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                   bucket_count 2
                   bucket_field_name key
                   columns key,value
@@ -116,9 +117,13 @@ STAGE PLANS:
                   columns.types int:string
 #### A masked pattern was here ####
                   name default.bucket2_1
+                  numFiles 0
+                  numRows 0
+                  rawDataSize 0
                   serialization.ddl struct bucket2_1 { i32 key, string value}
                   serialization.format 1
                   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  totalSize 0
 #### A masked pattern was here ####
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: default.bucket2_1
@@ -135,6 +140,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count 2
                 bucket_field_name key
                 columns key,value
@@ -142,9 +148,13 @@ STAGE PLANS:
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.bucket2_1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket2_1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucket2_1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucket4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket4.q.out b/ql/src/test/results/clientpositive/bucket4.q.out
index c4baf72..803a2bb 100644
--- a/ql/src/test/results/clientpositive/bucket4.q.out
+++ b/ql/src/test/results/clientpositive/bucket4.q.out
@@ -106,6 +106,7 @@ STAGE PLANS:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 properties:
+                  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                   SORTBUCKETCOLSPREFIX TRUE
                   bucket_count 2
                   bucket_field_name key
@@ -114,9 +115,13 @@ STAGE PLANS:
                   columns.types int:string
 #### A masked pattern was here ####
                   name default.bucket4_1
+                  numFiles 0
+                  numRows 0
+                  rawDataSize 0
                   serialization.ddl struct bucket4_1 { i32 key, string value}
                   serialization.format 1
                   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  totalSize 0
 #### A masked pattern was here ####
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: default.bucket4_1
@@ -133,6 +138,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 SORTBUCKETCOLSPREFIX TRUE
                 bucket_count 2
                 bucket_field_name key
@@ -141,9 +147,13 @@ STAGE PLANS:
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.bucket4_1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket4_1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucket4_1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucket5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket5.q.out b/ql/src/test/results/clientpositive/bucket5.q.out
index dee79f9..2e37eef 100644
--- a/ql/src/test/results/clientpositive/bucket5.q.out
+++ b/ql/src/test/results/clientpositive/bucket5.q.out
@@ -151,6 +151,7 @@ STAGE PLANS:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 properties:
+                  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                   SORTBUCKETCOLSPREFIX TRUE
                   bucket_count 2
                   bucket_field_name key
@@ -159,9 +160,13 @@ STAGE PLANS:
                   columns.types int:string
 #### A masked pattern was here ####
                   name default.bucketed_table
+                  numFiles 0
+                  numRows 0
+                  rawDataSize 0
                   serialization.ddl struct bucketed_table { i32 key, string value}
                   serialization.format 1
                   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  totalSize 0
 #### A masked pattern was here ####
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: default.bucketed_table
@@ -178,6 +183,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 SORTBUCKETCOLSPREFIX TRUE
                 bucket_count 2
                 bucket_field_name key
@@ -186,9 +192,13 @@ STAGE PLANS:
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.bucketed_table
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketed_table { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketed_table
@@ -253,15 +263,20 @@ STAGE PLANS:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 properties:
+                  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                   bucket_count -1
                   columns key,value
                   columns.comments 
                   columns.types int:string
 #### A masked pattern was here ####
                   name default.unbucketed_table
+                  numFiles 0
+                  numRows 0
+                  rawDataSize 0
                   serialization.ddl struct unbucketed_table { i32 key, string value}
                   serialization.format 1
                   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  totalSize 0
 #### A masked pattern was here ####
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: default.unbucketed_table
@@ -287,15 +302,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.unbucketed_table
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct unbucketed_table { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.unbucketed_table
@@ -318,15 +338,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value
                     columns.comments 
                     columns.types int:string
 #### A masked pattern was here ####
                     name default.unbucketed_table
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct unbucketed_table { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.unbucketed_table
@@ -342,30 +367,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value
               columns.comments 
               columns.types int:string
 #### A masked pattern was here ####
               name default.unbucketed_table
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct unbucketed_table { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.unbucketed_table
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct unbucketed_table { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.unbucketed_table
@@ -387,15 +422,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value
                     columns.comments 
                     columns.types int:string
 #### A masked pattern was here ####
                     name default.unbucketed_table
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct unbucketed_table { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.unbucketed_table
@@ -411,30 +451,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value
               columns.comments 
               columns.types int:string
 #### A masked pattern was here ####
               name default.unbucketed_table
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct unbucketed_table { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.unbucketed_table
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct unbucketed_table { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.unbucketed_table

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucket_many.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket_many.q.out b/ql/src/test/results/clientpositive/bucket_many.q.out
index 90d9fb8..9bd90b1 100644
--- a/ql/src/test/results/clientpositive/bucket_many.q.out
+++ b/ql/src/test/results/clientpositive/bucket_many.q.out
@@ -105,6 +105,7 @@ STAGE PLANS:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 properties:
+                  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                   bucket_count 256
                   bucket_field_name key
                   columns key,value
@@ -112,9 +113,13 @@ STAGE PLANS:
                   columns.types int:string
 #### A masked pattern was here ####
                   name default.bucket_many
+                  numFiles 0
+                  numRows 0
+                  rawDataSize 0
                   serialization.ddl struct bucket_many { i32 key, string value}
                   serialization.format 1
                   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  totalSize 0
 #### A masked pattern was here ####
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: default.bucket_many
@@ -131,6 +136,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count 256
                 bucket_field_name key
                 columns key,value
@@ -138,9 +144,13 @@ STAGE PLANS:
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.bucket_many
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket_many { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucket_many

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucket_map_join_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket_map_join_1.q.out b/ql/src/test/results/clientpositive/bucket_map_join_1.q.out
index 49bf2ca..418e5aa 100644
--- a/ql/src/test/results/clientpositive/bucket_map_join_1.q.out
+++ b/ql/src/test/results/clientpositive/bucket_map_join_1.q.out
@@ -135,6 +135,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.table1
               numFiles 1
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct table1 { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -154,6 +156,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.table1
                 numFiles 1
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct table1 { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucket_map_join_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket_map_join_2.q.out b/ql/src/test/results/clientpositive/bucket_map_join_2.q.out
index 9ec7033..01ad865 100644
--- a/ql/src/test/results/clientpositive/bucket_map_join_2.q.out
+++ b/ql/src/test/results/clientpositive/bucket_map_join_2.q.out
@@ -135,6 +135,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.table1
               numFiles 1
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct table1 { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -154,6 +156,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.table1
                 numFiles 1
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct table1 { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucket_map_join_spark1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket_map_join_spark1.q.out b/ql/src/test/results/clientpositive/bucket_map_join_spark1.q.out
index 19937cb..2435583 100644
--- a/ql/src/test/results/clientpositive/bucket_map_join_spark1.q.out
+++ b/ql/src/test/results/clientpositive/bucket_map_join_spark1.q.out
@@ -145,8 +145,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -232,15 +234,20 @@ STAGE PLANS:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                           properties:
+                            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                             bucket_count -1
                             columns key,value1,value2
                             columns.comments 
                             columns.types string:string:string
 #### A masked pattern was here ####
                             name default.bucketmapjoin_tmp_result
+                            numFiles 0
+                            numRows 0
+                            rawDataSize 0
                             serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                             serialization.format 1
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            totalSize 0
 #### A masked pattern was here ####
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: default.bucketmapjoin_tmp_result
@@ -268,8 +275,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -312,8 +321,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_2
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -352,15 +363,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -455,8 +471,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -583,8 +601,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -627,8 +647,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_2
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/bucket_map_join_spark2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket_map_join_spark2.q.out b/ql/src/test/results/clientpositive/bucket_map_join_spark2.q.out
index 90528be..5c29409 100644
--- a/ql/src/test/results/clientpositive/bucket_map_join_spark2.q.out
+++ b/ql/src/test/results/clientpositive/bucket_map_join_spark2.q.out
@@ -129,8 +129,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -216,15 +218,20 @@ STAGE PLANS:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                           properties:
+                            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                             bucket_count -1
                             columns key,value1,value2
                             columns.comments 
                             columns.types string:string:string
 #### A masked pattern was here ####
                             name default.bucketmapjoin_tmp_result
+                            numFiles 0
+                            numRows 0
+                            rawDataSize 0
                             serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                             serialization.format 1
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            totalSize 0
 #### A masked pattern was here ####
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: default.bucketmapjoin_tmp_result
@@ -252,8 +259,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -296,8 +305,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_2
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -336,15 +347,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -439,8 +455,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -567,8 +585,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -611,8 +631,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part_2
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe


http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucket2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket2.q.out b/ql/src/test/results/clientpositive/spark/bucket2.q.out
index aa47459..f9d4782 100644
--- a/ql/src/test/results/clientpositive/spark/bucket2.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket2.q.out
@@ -115,6 +115,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         bucket_count 2
                         bucket_field_name key
                         columns key,value
@@ -122,9 +123,13 @@ STAGE PLANS:
                         columns.types int:string
 #### A masked pattern was here ####
                         name default.bucket2_1
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct bucket2_1 { i32 key, string value}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.bucket2_1
@@ -141,6 +146,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count 2
                 bucket_field_name key
                 columns key,value
@@ -148,9 +154,13 @@ STAGE PLANS:
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.bucket2_1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket2_1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucket2_1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucket4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket4.q.out b/ql/src/test/results/clientpositive/spark/bucket4.q.out
index 680cee7..68f8143 100644
--- a/ql/src/test/results/clientpositive/spark/bucket4.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket4.q.out
@@ -112,6 +112,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         SORTBUCKETCOLSPREFIX TRUE
                         bucket_count 2
                         bucket_field_name key
@@ -120,9 +121,13 @@ STAGE PLANS:
                         columns.types int:string
 #### A masked pattern was here ####
                         name default.bucket4_1
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct bucket4_1 { i32 key, string value}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.bucket4_1
@@ -139,6 +144,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 SORTBUCKETCOLSPREFIX TRUE
                 bucket_count 2
                 bucket_field_name key
@@ -147,9 +153,13 @@ STAGE PLANS:
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.bucket4_1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket4_1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucket4_1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucket5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket5.q.out b/ql/src/test/results/clientpositive/spark/bucket5.q.out
index 0504eac..a78fae0 100644
--- a/ql/src/test/results/clientpositive/spark/bucket5.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket5.q.out
@@ -199,6 +199,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         SORTBUCKETCOLSPREFIX TRUE
                         bucket_count 2
                         bucket_field_name key
@@ -207,9 +208,13 @@ STAGE PLANS:
                         columns.types int:string
 #### A masked pattern was here ####
                         name default.bucketed_table
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct bucketed_table { i32 key, string value}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.bucketed_table
@@ -234,15 +239,20 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         bucket_count -1
                         columns key,value
                         columns.comments 
                         columns.types int:string
 #### A masked pattern was here ####
                         name default.unbucketed_table
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct unbucketed_table { i32 key, string value}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.unbucketed_table
@@ -259,6 +269,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 SORTBUCKETCOLSPREFIX TRUE
                 bucket_count 2
                 bucket_field_name key
@@ -267,9 +278,13 @@ STAGE PLANS:
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.bucketed_table
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketed_table { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketed_table
@@ -287,15 +302,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.unbucketed_table
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct unbucketed_table { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.unbucketed_table

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucket_map_join_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket_map_join_1.q.out b/ql/src/test/results/clientpositive/spark/bucket_map_join_1.q.out
index 299cedf..5bdec32 100644
--- a/ql/src/test/results/clientpositive/spark/bucket_map_join_1.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket_map_join_1.q.out
@@ -101,6 +101,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.table2
                     numFiles 1
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct table2 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -120,6 +122,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.table2
                       numFiles 1
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct table2 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -189,6 +193,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.table1
                     numFiles 1
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct table1 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -208,6 +214,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.table1
                       numFiles 1
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct table1 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucket_map_join_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket_map_join_2.q.out b/ql/src/test/results/clientpositive/spark/bucket_map_join_2.q.out
index f330bf2..5ec1af9 100644
--- a/ql/src/test/results/clientpositive/spark/bucket_map_join_2.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket_map_join_2.q.out
@@ -101,6 +101,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.table2
                     numFiles 1
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct table2 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -120,6 +122,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.table2
                       numFiles 1
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct table2 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -189,6 +193,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.table1
                     numFiles 1
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct table1 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -208,6 +214,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.table1
                       numFiles 1
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct table1 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucket_map_join_spark1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket_map_join_spark1.q.out b/ql/src/test/results/clientpositive/spark/bucket_map_join_spark1.q.out
index b32528c..f242ac1 100644
--- a/ql/src/test/results/clientpositive/spark/bucket_map_join_spark1.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket_map_join_spark1.q.out
@@ -172,8 +172,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -248,15 +250,20 @@ STAGE PLANS:
                                 input format: org.apache.hadoop.mapred.TextInputFormat
                                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                                 properties:
+                                  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                   bucket_count -1
                                   columns key,value1,value2
                                   columns.comments 
                                   columns.types string:string:string
 #### A masked pattern was here ####
                                   name default.bucketmapjoin_tmp_result
+                                  numFiles 0
+                                  numRows 0
+                                  rawDataSize 0
                                   serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                                   serialization.format 1
                                   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                  totalSize 0
 #### A masked pattern was here ####
                                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                                 name: default.bucketmapjoin_tmp_result
@@ -289,8 +296,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -329,15 +338,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -454,8 +468,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -570,8 +586,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucket_map_join_spark2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket_map_join_spark2.q.out b/ql/src/test/results/clientpositive/spark/bucket_map_join_spark2.q.out
index 7b304c5..7fcd591 100644
--- a/ql/src/test/results/clientpositive/spark/bucket_map_join_spark2.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket_map_join_spark2.q.out
@@ -156,8 +156,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -232,15 +234,20 @@ STAGE PLANS:
                                 input format: org.apache.hadoop.mapred.TextInputFormat
                                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                                 properties:
+                                  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                   bucket_count -1
                                   columns key,value1,value2
                                   columns.comments 
                                   columns.types string:string:string
 #### A masked pattern was here ####
                                   name default.bucketmapjoin_tmp_result
+                                  numFiles 0
+                                  numRows 0
+                                  rawDataSize 0
                                   serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                                   serialization.format 1
                                   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                  totalSize 0
 #### A masked pattern was here ####
                                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                                 name: default.bucketmapjoin_tmp_result
@@ -273,8 +280,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -313,15 +322,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -443,8 +457,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -565,8 +581,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucket_map_join_spark3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket_map_join_spark3.q.out b/ql/src/test/results/clientpositive/spark/bucket_map_join_spark3.q.out
index 4265d1a..7efd3c2 100644
--- a/ql/src/test/results/clientpositive/spark/bucket_map_join_spark3.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket_map_join_spark3.q.out
@@ -156,8 +156,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -232,15 +234,20 @@ STAGE PLANS:
                                 input format: org.apache.hadoop.mapred.TextInputFormat
                                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                                 properties:
+                                  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                   bucket_count -1
                                   columns key,value1,value2
                                   columns.comments 
                                   columns.types string:string:string
 #### A masked pattern was here ####
                                   name default.bucketmapjoin_tmp_result
+                                  numFiles 0
+                                  numRows 0
+                                  rawDataSize 0
                                   serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                                   serialization.format 1
                                   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                  totalSize 0
 #### A masked pattern was here ####
                                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                                 name: default.bucketmapjoin_tmp_result
@@ -273,8 +280,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -313,15 +322,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -438,8 +452,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -554,8 +570,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucketmapjoin1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin1.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin1.q.out
index c6a7f0e..888b43a 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin1.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin1.q.out
@@ -416,8 +416,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -488,15 +490,20 @@ STAGE PLANS:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                               properties:
+                                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                 bucket_count -1
                                 columns key,value1,value2
                                 columns.comments 
                                 columns.types string:string:string
 #### A masked pattern was here ####
                                 name default.bucketmapjoin_tmp_result
+                                numFiles 0
+                                numRows 0
+                                rawDataSize 0
                                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                                 serialization.format 1
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                totalSize 0
 #### A masked pattern was here ####
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: default.bucketmapjoin_tmp_result
@@ -527,6 +534,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin
                     numFiles 2
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -545,6 +554,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.srcbucket_mapjoin
                       numFiles 2
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -565,15 +576,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -745,6 +761,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin
                     numFiles 2
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -763,6 +781,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.srcbucket_mapjoin
                       numFiles 2
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -861,8 +881,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucketmapjoin10.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin10.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin10.q.out
index 0e3b5c7..772c534 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin10.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin10.q.out
@@ -186,8 +186,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 3
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -230,8 +232,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -321,8 +325,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_1
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -365,8 +371,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_1
                     numFiles 3
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucketmapjoin11.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin11.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin11.q.out
index 38f9785..8633e2c 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin11.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin11.q.out
@@ -201,8 +201,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 4
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -245,8 +247,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -342,8 +346,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_1
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -386,8 +392,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_1
                     numFiles 4
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -538,8 +546,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 4
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -582,8 +592,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -679,8 +691,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_1
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -723,8 +737,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_1
                     numFiles 4
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucketmapjoin12.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin12.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin12.q.out
index 55ded7d..a71f4ed 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin12.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin12.q.out
@@ -160,8 +160,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -255,8 +257,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_1
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -400,8 +404,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_3
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_3 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -490,8 +496,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_1
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucketmapjoin2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin2.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin2.q.out
index 48ff95f..5c215eb 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin2.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin2.q.out
@@ -152,8 +152,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -224,15 +226,20 @@ STAGE PLANS:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                               properties:
+                                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                 bucket_count -1
                                 columns key,value1,value2
                                 columns.comments 
                                 columns.types string:string:string
 #### A masked pattern was here ####
                                 name default.bucketmapjoin_tmp_result
+                                numFiles 0
+                                numRows 0
+                                rawDataSize 0
                                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                                 serialization.format 1
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                totalSize 0
 #### A masked pattern was here ####
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: default.bucketmapjoin_tmp_result
@@ -265,8 +272,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -305,15 +314,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -491,8 +505,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -609,8 +625,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -859,8 +877,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -903,8 +923,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1022,8 +1044,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out
index 95589ca..6d3f182 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out
@@ -176,8 +176,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -248,15 +250,20 @@ STAGE PLANS:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                               properties:
+                                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                 bucket_count -1
                                 columns key,value1,value2
                                 columns.comments 
                                 columns.types string:string:string
 #### A masked pattern was here ####
                                 name default.bucketmapjoin_tmp_result
+                                numFiles 0
+                                numRows 0
+                                rawDataSize 0
                                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                                 serialization.format 1
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                totalSize 0
 #### A masked pattern was here ####
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: default.bucketmapjoin_tmp_result
@@ -289,8 +296,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -329,15 +338,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -515,8 +529,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -633,8 +649,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucketmapjoin4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin4.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin4.q.out
index fba617c..9f8d9d8 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin4.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin4.q.out
@@ -174,6 +174,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin
                     numFiles 2
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -192,6 +194,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.srcbucket_mapjoin
                       numFiles 2
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -244,15 +248,20 @@ STAGE PLANS:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                               properties:
+                                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                 bucket_count -1
                                 columns key,value1,value2
                                 columns.comments 
                                 columns.types string:string:string
 #### A masked pattern was here ####
                                 name default.bucketmapjoin_tmp_result
+                                numFiles 0
+                                numRows 0
+                                rawDataSize 0
                                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                                 serialization.format 1
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                totalSize 0
 #### A masked pattern was here ####
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: default.bucketmapjoin_tmp_result
@@ -283,6 +292,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin
                     numFiles 2
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -301,6 +312,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.srcbucket_mapjoin
                       numFiles 2
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -321,15 +334,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -493,6 +511,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin
                     numFiles 2
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -511,6 +531,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.srcbucket_mapjoin
                       numFiles 2
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -607,6 +629,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin
                     numFiles 2
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -625,6 +649,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.srcbucket_mapjoin
                       numFiles 2
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucketmapjoin5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin5.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin5.q.out
index 7214b42..0fd15e8 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin5.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin5.q.out
@@ -224,6 +224,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin
                     numFiles 2
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -242,6 +244,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.srcbucket_mapjoin
                       numFiles 2
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -294,15 +298,20 @@ STAGE PLANS:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                               properties:
+                                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                 bucket_count -1
                                 columns key,value1,value2
                                 columns.comments 
                                 columns.types string:string:string
 #### A masked pattern was here ####
                                 name default.bucketmapjoin_tmp_result
+                                numFiles 0
+                                numRows 0
+                                rawDataSize 0
                                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                                 serialization.format 1
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                totalSize 0
 #### A masked pattern was here ####
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: default.bucketmapjoin_tmp_result
@@ -335,8 +344,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -379,8 +390,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -420,15 +433,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -604,6 +622,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin
                     numFiles 2
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -622,6 +642,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.srcbucket_mapjoin
                       numFiles 2
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -720,8 +742,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -764,8 +788,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out
index d344070..971b8fc 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out
@@ -120,8 +120,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds/hr
                     partition_columns.types string:string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -219,8 +221,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_1
                     numFiles 2
+                    numRows 0
                     partition_columns ds/hr
                     partition_columns.types string:string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucketmapjoin8.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin8.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin8.q.out
index a7ad6de..145f39e 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin8.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin8.q.out
@@ -125,8 +125,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -221,8 +223,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_1
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -380,8 +384,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -476,8 +482,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_1
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucketmapjoin9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin9.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin9.q.out
index 6e4c536..4f054f5 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin9.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin9.q.out
@@ -128,8 +128,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 3
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -218,8 +220,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_1
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -405,8 +409,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -495,8 +501,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_1
                     numFiles 2
+                    numRows 0
                     partition_columns part
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative.q.out
index 9fff7b2..67e1eba 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative.q.out
@@ -122,8 +122,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 3
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -193,15 +195,20 @@ STAGE PLANS:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                               properties:
+                                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                 bucket_count -1
                                 columns key,value1,value2
                                 columns.comments 
                                 columns.types string:string:string
 #### A masked pattern was here ####
                                 name default.bucketmapjoin_tmp_result
+                                numFiles 0
+                                numRows 0
+                                rawDataSize 0
                                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                                 serialization.format 1
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                totalSize 0
 #### A masked pattern was here ####
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: default.bucketmapjoin_tmp_result
@@ -227,6 +234,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin
                     numFiles 2
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -245,6 +254,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.srcbucket_mapjoin
                       numFiles 2
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -265,15 +276,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative2.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative2.q.out
index b199ad9..6861721 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative2.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative2.q.out
@@ -136,8 +136,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -180,8 +182,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -253,15 +257,20 @@ STAGE PLANS:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                               properties:
+                                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                                 bucket_count -1
                                 columns key,value1,value2
                                 columns.comments 
                                 columns.types string:string:string
 #### A masked pattern was here ####
                                 name default.bucketmapjoin_tmp_result
+                                numFiles 0
+                                numRows 0
+                                rawDataSize 0
                                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                                 serialization.format 1
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                totalSize 0
 #### A masked pattern was here ####
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: default.bucketmapjoin_tmp_result
@@ -292,6 +301,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin
                     numFiles 2
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -310,6 +321,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.srcbucket_mapjoin
                       numFiles 2
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -330,15 +343,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result


[17/21] hive git commit: HIVE-13341: Stats state is not captured correctly: differentiate load table and create table (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

Posted by px...@apache.org.
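
The expected-output changes in this patch all follow one pattern, reflecting how
HIVE-13341 differentiates the two cases: a table or partition created empty has
exactly known basic stats (COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} with
numRows, rawDataSize, numFiles, and totalSize all 0), while LOAD DATA moves files
in without scanning them, so only the file-level counters are updated and
numRows/rawDataSize remain 0 without the accuracy flag. A minimal sketch of the
distinction (the table name stats_demo and the input path are illustrative only,
and it assumes a default configuration with hive.stats.autogather enabled):

    -- Freshly created, empty table: basic stats are exactly known, so
    -- DESCRIBE FORMATTED shows COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
    -- alongside numRows 0, rawDataSize 0, numFiles 0, totalSize 0.
    CREATE TABLE stats_demo (key INT, value STRING);
    DESCRIBE FORMATTED stats_demo;

    -- LOAD DATA copies files in without reading rows, so only the
    -- file-level counters (numFiles, totalSize) can be trusted;
    -- numRows and rawDataSize stay 0 and the stats are no longer
    -- marked accurate.
    LOAD DATA LOCAL INPATH '/tmp/kv1.txt' INTO TABLE stats_demo;
    DESCRIBE FORMATTED stats_demo;

This is why, in the hunks below, loaded partitions such as
srcbucket_mapjoin_part gain only numRows 0 and rawDataSize 0 lines, while
never-loaded targets such as bucketmapjoin_tmp_result and outputtbl1 also gain
the COLUMN_STATS_ACCURATE flag with numFiles 0 and totalSize 0.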
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out b/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
index a6e15ba..7ef56fc 100644
--- a/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
+++ b/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
@@ -97,15 +97,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,cnt
                           columns.comments 
                           columns.types int:int
 #### A masked pattern was here ####
                           name default.outputtbl1
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.outputtbl1
@@ -184,15 +189,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,cnt
                 columns.comments 
                 columns.types int:int
 #### A masked pattern was here ####
                 name default.outputtbl1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl1
@@ -215,15 +225,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,cnt
                     columns.comments 
                     columns.types int:int
 #### A masked pattern was here ####
                     name default.outputtbl1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.outputtbl1
@@ -239,30 +254,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,cnt
               columns.comments 
               columns.types int:int
 #### A masked pattern was here ####
               name default.outputtbl1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,cnt
                 columns.comments 
                 columns.types int:int
 #### A masked pattern was here ####
                 name default.outputtbl1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl1
@@ -284,15 +309,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,cnt
                     columns.comments 
                     columns.types int:int
 #### A masked pattern was here ####
                     name default.outputtbl1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.outputtbl1
@@ -308,30 +338,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,cnt
               columns.comments 
               columns.types int:int
 #### A masked pattern was here ####
               name default.outputtbl1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,cnt
                 columns.comments 
                 columns.types int:int
 #### A masked pattern was here ####
                 name default.outputtbl1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl1
@@ -496,15 +536,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key1,key2,cnt
                     columns.comments 
                     columns.types int:string:int
 #### A masked pattern was here ####
                     name default.outputtbl2
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct outputtbl2 { i32 key1, string key2, i32 cnt}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.outputtbl2
@@ -521,15 +566,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,cnt
                 columns.comments 
                 columns.types int:string:int
 #### A masked pattern was here ####
                 name default.outputtbl2
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl2 { i32 key1, string key2, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl2
@@ -1360,15 +1410,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key1,key2,cnt
                           columns.comments 
                           columns.types int:int:int
 #### A masked pattern was here ####
                           name default.outputtbl3
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.outputtbl3
@@ -1447,15 +1502,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,cnt
                 columns.comments 
                 columns.types int:int:int
 #### A masked pattern was here ####
                 name default.outputtbl3
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl3
@@ -1478,15 +1538,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key1,key2,cnt
                     columns.comments 
                     columns.types int:int:int
 #### A masked pattern was here ####
                     name default.outputtbl3
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.outputtbl3
@@ -1502,30 +1567,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key1,key2,cnt
               columns.comments 
               columns.types int:int:int
 #### A masked pattern was here ####
               name default.outputtbl3
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,cnt
                 columns.comments 
                 columns.types int:int:int
 #### A masked pattern was here ####
                 name default.outputtbl3
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl3
@@ -1547,15 +1622,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key1,key2,cnt
                     columns.comments 
                     columns.types int:int:int
 #### A masked pattern was here ####
                     name default.outputtbl3
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.outputtbl3
@@ -1571,30 +1651,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key1,key2,cnt
               columns.comments 
               columns.types int:int:int
 #### A masked pattern was here ####
               name default.outputtbl3
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,cnt
                 columns.comments 
                 columns.types int:int:int
 #### A masked pattern was here ####
                 name default.outputtbl3
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl3
@@ -1760,15 +1850,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key1,key2,key3,cnt
                     columns.comments 
                     columns.types int:int:string:int
 #### A masked pattern was here ####
                     name default.outputtbl4
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.outputtbl4
@@ -1785,15 +1880,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,key3,cnt
                 columns.comments 
                 columns.types int:int:string:int
 #### A masked pattern was here ####
                 name default.outputtbl4
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl4
@@ -4425,15 +4525,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key1,key2,key3,key4,cnt
                           columns.comments 
                           columns.types int:int:string:int:int
 #### A masked pattern was here ####
                           name default.outputtbl5
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.outputtbl5
@@ -4512,15 +4617,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,key3,key4,cnt
                 columns.comments 
                 columns.types int:int:string:int:int
 #### A masked pattern was here ####
                 name default.outputtbl5
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl5
@@ -4543,15 +4653,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key1,key2,key3,key4,cnt
                     columns.comments 
                     columns.types int:int:string:int:int
 #### A masked pattern was here ####
                     name default.outputtbl5
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.outputtbl5
@@ -4567,30 +4682,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key1,key2,key3,key4,cnt
               columns.comments 
               columns.types int:int:string:int:int
 #### A masked pattern was here ####
               name default.outputtbl5
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,key3,key4,cnt
                 columns.comments 
                 columns.types int:int:string:int:int
 #### A masked pattern was here ####
                 name default.outputtbl5
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl5
@@ -4612,15 +4737,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key1,key2,key3,key4,cnt
                     columns.comments 
                     columns.types int:int:string:int:int
 #### A masked pattern was here ####
                     name default.outputtbl5
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.outputtbl5
@@ -4636,30 +4766,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key1,key2,key3,key4,cnt
               columns.comments 
               columns.types int:int:string:int:int
 #### A masked pattern was here ####
               name default.outputtbl5
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,key3,key4,cnt
                 columns.comments 
                 columns.types int:int:string:int:int
 #### A masked pattern was here ####
                 name default.outputtbl5
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl5

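The property blocks above show freshly created, still-empty target tables (outputtbl2 through outputtbl5) carrying COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} together with zero-valued numFiles, numRows, rawDataSize and totalSize. A minimal HiveQL sketch of how these parameters can be observed; the table name stats_demo is illustrative and not taken from any .q file here:

    -- Create an empty managed table; after this change its basic stats
    -- start out as accurate zeros instead of being left unset.
    CREATE TABLE stats_demo (key1 INT, key2 STRING, cnt INT);

    -- The new parameters appear in the table metadata:
    DESCRIBE FORMATTED stats_demo;
    -- expected among Table Parameters:
    --   COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
    --   numFiles 0 / numRows 0 / rawDataSize 0 / totalSize 0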
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/groupby_sort_6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_sort_6.q.out b/ql/src/test/results/clientpositive/groupby_sort_6.q.out
index f523085..9804cb0 100644
--- a/ql/src/test/results/clientpositive/groupby_sort_6.q.out
+++ b/ql/src/test/results/clientpositive/groupby_sort_6.q.out
@@ -87,15 +87,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,cnt
                     columns.comments 
                     columns.types int:int
 #### A masked pattern was here ####
                     name default.outputtbl1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.outputtbl1
@@ -112,15 +117,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,cnt
                 columns.comments 
                 columns.types int:int
 #### A masked pattern was here ####
                 name default.outputtbl1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl1
@@ -361,8 +371,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.t1
               numFiles 1
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct t1 { string key, string val}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

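The groupby_sort_6.q.out hunks above are EXPLAIN EXTENDED plans for a grouped insert into the empty outputtbl1, reading one ds partition of t1. A hedged sketch of the statement shape these plans correspond to; the names are taken from the plan properties above, and the real groupby_sort_6.q may differ in details such as the partition value:

    -- outputtbl1(key INT, cnt INT) is newly created and empty, so its
    -- plan properties now report accurate zero basic stats.
    INSERT OVERWRITE TABLE outputtbl1
    SELECT key, COUNT(1)
    FROM t1
    WHERE ds = '1'   -- t1 is partitioned by ds (see partition_columns above)
    GROUP BY key;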
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out b/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out
index 38b24a4..2819487 100644
--- a/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out
+++ b/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out
@@ -97,15 +97,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,cnt
                           columns.comments 
                           columns.types int:int
 #### A masked pattern was here ####
                           name default.outputtbl1
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.outputtbl1
@@ -184,15 +189,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,cnt
                 columns.comments 
                 columns.types int:int
 #### A masked pattern was here ####
                 name default.outputtbl1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl1
@@ -215,15 +225,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,cnt
                     columns.comments 
                     columns.types int:int
 #### A masked pattern was here ####
                     name default.outputtbl1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.outputtbl1
@@ -239,30 +254,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,cnt
               columns.comments 
               columns.types int:int
 #### A masked pattern was here ####
               name default.outputtbl1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,cnt
                 columns.comments 
                 columns.types int:int
 #### A masked pattern was here ####
                 name default.outputtbl1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl1
@@ -284,15 +309,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,cnt
                     columns.comments 
                     columns.types int:int
 #### A masked pattern was here ####
                     name default.outputtbl1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.outputtbl1
@@ -308,30 +338,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,cnt
               columns.comments 
               columns.types int:int
 #### A masked pattern was here ####
               name default.outputtbl1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,cnt
                 columns.comments 
                 columns.types int:int
 #### A masked pattern was here ####
                 name default.outputtbl1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl1
@@ -562,15 +602,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key1,key2,cnt
                     columns.comments 
                     columns.types int:string:int
 #### A masked pattern was here ####
                     name default.outputtbl2
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct outputtbl2 { i32 key1, string key2, i32 cnt}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.outputtbl2
@@ -587,15 +632,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,cnt
                 columns.comments 
                 columns.types int:string:int
 #### A masked pattern was here ####
                 name default.outputtbl2
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl2 { i32 key1, string key2, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl2
@@ -1426,15 +1476,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key1,key2,cnt
                           columns.comments 
                           columns.types int:int:int
 #### A masked pattern was here ####
                           name default.outputtbl3
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.outputtbl3
@@ -1513,15 +1568,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,cnt
                 columns.comments 
                 columns.types int:int:int
 #### A masked pattern was here ####
                 name default.outputtbl3
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl3
@@ -1544,15 +1604,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key1,key2,cnt
                     columns.comments 
                     columns.types int:int:int
 #### A masked pattern was here ####
                     name default.outputtbl3
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.outputtbl3
@@ -1568,30 +1633,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key1,key2,cnt
               columns.comments 
               columns.types int:int:int
 #### A masked pattern was here ####
               name default.outputtbl3
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,cnt
                 columns.comments 
                 columns.types int:int:int
 #### A masked pattern was here ####
                 name default.outputtbl3
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl3
@@ -1613,15 +1688,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key1,key2,cnt
                     columns.comments 
                     columns.types int:int:int
 #### A masked pattern was here ####
                     name default.outputtbl3
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.outputtbl3
@@ -1637,30 +1717,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key1,key2,cnt
               columns.comments 
               columns.types int:int:int
 #### A masked pattern was here ####
               name default.outputtbl3
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,cnt
                 columns.comments 
                 columns.types int:int:int
 #### A masked pattern was here ####
                 name default.outputtbl3
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl3
@@ -1892,15 +1982,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key1,key2,key3,cnt
                     columns.comments 
                     columns.types int:int:string:int
 #### A masked pattern was here ####
                     name default.outputtbl4
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.outputtbl4
@@ -1917,15 +2012,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,key3,cnt
                 columns.comments 
                 columns.types int:int:string:int
 #### A masked pattern was here ####
                 name default.outputtbl4
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl4
@@ -4887,15 +4987,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key1,key2,key3,key4,cnt
                           columns.comments 
                           columns.types int:int:string:int:int
 #### A masked pattern was here ####
                           name default.outputtbl5
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.outputtbl5
@@ -4974,15 +5079,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,key3,key4,cnt
                 columns.comments 
                 columns.types int:int:string:int:int
 #### A masked pattern was here ####
                 name default.outputtbl5
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl5
@@ -5005,15 +5115,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key1,key2,key3,key4,cnt
                     columns.comments 
                     columns.types int:int:string:int:int
 #### A masked pattern was here ####
                     name default.outputtbl5
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.outputtbl5
@@ -5029,30 +5144,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key1,key2,key3,key4,cnt
               columns.comments 
               columns.types int:int:string:int:int
 #### A masked pattern was here ####
               name default.outputtbl5
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,key3,key4,cnt
                 columns.comments 
                 columns.types int:int:string:int:int
 #### A masked pattern was here ####
                 name default.outputtbl5
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl5
@@ -5074,15 +5199,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key1,key2,key3,key4,cnt
                     columns.comments 
                     columns.types int:int:string:int:int
 #### A masked pattern was here ####
                     name default.outputtbl5
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.outputtbl5
@@ -5098,30 +5228,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key1,key2,key3,key4,cnt
               columns.comments 
               columns.types int:int:string:int:int
 #### A masked pattern was here ####
               name default.outputtbl5
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,key2,key3,key4,cnt
                 columns.comments 
                 columns.types int:int:string:int:int
 #### A masked pattern was here ####
                 name default.outputtbl5
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl5

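groupby_sort_skew_1_23.q.out covers the same family of group-by plans with skew handling turned on, which is why each target table (outputtbl1 through outputtbl5) shows the same new stats properties across the extra map-reduce stages. A hedged sketch, assuming the standard skew setting; the actual test file may set more options:

    -- hive.groupby.skewindata spreads a skewed GROUP BY over two MR
    -- stages: randomized partial aggregation, then final aggregation.
    SET hive.groupby.skewindata=true;

    INSERT OVERWRITE TABLE outputtbl1
    SELECT key, COUNT(1) FROM t1 WHERE ds = '1' GROUP BY key;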
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/input_part1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/input_part1.q.out b/ql/src/test/results/clientpositive/input_part1.q.out
index 2abaa3b..d3efb0d 100644
--- a/ql/src/test/results/clientpositive/input_part1.q.out
+++ b/ql/src/test/results/clientpositive/input_part1.q.out
@@ -51,15 +51,20 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         bucket_count -1
                         columns key,value,hr,ds
                         columns.comments 
                         columns.types int:string:string:string
 #### A masked pattern was here ####
                         name default.dest1
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.dest1
@@ -136,15 +141,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,hr,ds
                 columns.comments 
                 columns.types int:string:string:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -167,15 +177,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value,hr,ds
                     columns.comments 
                     columns.types int:string:string:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -191,30 +206,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value,hr,ds
               columns.comments 
               columns.types int:string:string:string
 #### A masked pattern was here ####
               name default.dest1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,hr,ds
                 columns.comments 
                 columns.types int:string:string:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -236,15 +261,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value,hr,ds
                     columns.comments 
                     columns.types int:string:string:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -260,30 +290,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value,hr,ds
               columns.comments 
               columns.types int:string:string:string
 #### A masked pattern was here ####
               name default.dest1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,hr,ds
                 columns.comments 
                 columns.types int:string:string:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

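The input_part1.q.out plans above write rows from one srcpart partition into the empty dest1(key, value, hr, ds). A sketch of the statement shape, with the column list read off the plan properties; the partition values are the usual srcpart test fixture and are an assumption here:

    -- dest1 is created empty, hence the zero-valued accurate stats in
    -- its plan properties above.
    INSERT OVERWRITE TABLE dest1
    SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds
    FROM srcpart
    WHERE srcpart.ds = '2008-04-08' AND srcpart.hr = '12';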
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/input_part2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/input_part2.q.out b/ql/src/test/results/clientpositive/input_part2.q.out
index 2ce071c..74db456 100644
--- a/ql/src/test/results/clientpositive/input_part2.q.out
+++ b/ql/src/test/results/clientpositive/input_part2.q.out
@@ -72,15 +72,20 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         bucket_count -1
                         columns key,value,hr,ds
                         columns.comments 
                         columns.types int:string:string:string
 #### A masked pattern was here ####
                         name default.dest1
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.dest1
@@ -106,15 +111,20 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         bucket_count -1
                         columns key,value,hr,ds
                         columns.comments 
                         columns.types int:string:string:string
 #### A masked pattern was here ####
                         name default.dest2
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct dest2 { i32 key, string value, string hr, string ds}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.dest2
@@ -238,15 +248,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,hr,ds
                 columns.comments 
                 columns.types int:string:string:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -269,15 +284,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value,hr,ds
                     columns.comments 
                     columns.types int:string:string:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -293,30 +313,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value,hr,ds
               columns.comments 
               columns.types int:string:string:string
 #### A masked pattern was here ####
               name default.dest1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,hr,ds
                 columns.comments 
                 columns.types int:string:string:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -338,15 +368,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value,hr,ds
                     columns.comments 
                     columns.types int:string:string:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -362,30 +397,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value,hr,ds
               columns.comments 
               columns.types int:string:string:string
 #### A masked pattern was here ####
               name default.dest1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,hr,ds
                 columns.comments 
                 columns.types int:string:string:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -417,15 +462,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,hr,ds
                 columns.comments 
                 columns.types int:string:string:string
 #### A masked pattern was here ####
                 name default.dest2
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest2 { i32 key, string value, string hr, string ds}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest2
@@ -448,15 +498,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value,hr,ds
                     columns.comments 
                     columns.types int:string:string:string
 #### A masked pattern was here ####
                     name default.dest2
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest2 { i32 key, string value, string hr, string ds}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest2
@@ -472,30 +527,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value,hr,ds
               columns.comments 
               columns.types int:string:string:string
 #### A masked pattern was here ####
               name default.dest2
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest2 { i32 key, string value, string hr, string ds}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,hr,ds
                 columns.comments 
                 columns.types int:string:string:string
 #### A masked pattern was here ####
                 name default.dest2
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest2 { i32 key, string value, string hr, string ds}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest2
@@ -517,15 +582,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value,hr,ds
                     columns.comments 
                     columns.types int:string:string:string
 #### A masked pattern was here ####
                     name default.dest2
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest2 { i32 key, string value, string hr, string ds}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest2
@@ -541,30 +611,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value,hr,ds
               columns.comments 
               columns.types int:string:string:string
 #### A masked pattern was here ####
               name default.dest2
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest2 { i32 key, string value, string hr, string ds}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,hr,ds
                 columns.comments 
                 columns.types int:string:string:string
 #### A masked pattern was here ####
                 name default.dest2
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest2 { i32 key, string value, string hr, string ds}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest2


[14/21] hive git commit: HIVE-13341: Stats state is not captured correctly: differentiate load table and create table (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

Posted by px...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out b/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out
index d88c53d..de1d6f4 100644
--- a/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out
+++ b/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out
@@ -55,15 +55,20 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         bucket_count -1
                         columns key,value,hr,ds
                         columns.comments 
                         columns.types string:string:string:string
 #### A masked pattern was here ####
                         name default.tmptable
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct tmptable { string key, string value, string hr, string ds}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.tmptable
@@ -187,15 +192,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,hr,ds
                 columns.comments 
                 columns.types string:string:string:string
 #### A masked pattern was here ####
                 name default.tmptable
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct tmptable { string key, string value, string hr, string ds}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.tmptable
@@ -218,15 +228,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value,hr,ds
                     columns.comments 
                     columns.types string:string:string:string
 #### A masked pattern was here ####
                     name default.tmptable
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct tmptable { string key, string value, string hr, string ds}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.tmptable
@@ -242,30 +257,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value,hr,ds
               columns.comments 
               columns.types string:string:string:string
 #### A masked pattern was here ####
               name default.tmptable
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct tmptable { string key, string value, string hr, string ds}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,hr,ds
                 columns.comments 
                 columns.types string:string:string:string
 #### A masked pattern was here ####
                 name default.tmptable
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct tmptable { string key, string value, string hr, string ds}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.tmptable
@@ -287,15 +312,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value,hr,ds
                     columns.comments 
                     columns.types string:string:string:string
 #### A masked pattern was here ####
                     name default.tmptable
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct tmptable { string key, string value, string hr, string ds}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.tmptable
@@ -311,30 +341,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value,hr,ds
               columns.comments 
               columns.types string:string:string:string
 #### A masked pattern was here ####
               name default.tmptable
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct tmptable { string key, string value, string hr, string ds}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,hr,ds
                 columns.comments 
                 columns.types string:string:string:string
 #### A masked pattern was here ####
                 name default.tmptable
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct tmptable { string key, string value, string hr, string ds}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.tmptable

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/rcfile_default_format.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/rcfile_default_format.q.out b/ql/src/test/results/clientpositive/rcfile_default_format.q.out
index c961231..bb846c0 100644
--- a/ql/src/test/results/clientpositive/rcfile_default_format.q.out
+++ b/ql/src/test/results/clientpositive/rcfile_default_format.q.out
@@ -23,6 +23,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -252,6 +257,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -336,6 +346,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
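
The DESCRIBE FORMATTED hunks in this file show the table-level view of the same change: an empty, just-created table reports BASIC_STATS as accurate with every counter at zero. A hedged sketch (the table name is assumed; the test exercises hive.default.fileformat, so the setting is shown for context):

    SET hive.default.fileformat=RCFile;
    CREATE TABLE rcfile_tbl (key STRING, value STRING);
    -- Table Parameters for the empty table now include:
    --   COLUMN_STATS_ACCURATE  {"BASIC_STATS":"true"}
    --   numFiles 0, numRows 0, rawDataSize 0, totalSize 0
    DESCRIBE FORMATTED rcfile_tbl;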

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/reduce_deduplicate.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/reduce_deduplicate.q.out b/ql/src/test/results/clientpositive/reduce_deduplicate.q.out
index 379f884..075336b 100644
--- a/ql/src/test/results/clientpositive/reduce_deduplicate.q.out
+++ b/ql/src/test/results/clientpositive/reduce_deduplicate.q.out
@@ -106,6 +106,7 @@ STAGE PLANS:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 properties:
+                  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                   bucket_count 2
                   bucket_field_name key
                   columns key,value
@@ -113,9 +114,13 @@ STAGE PLANS:
                   columns.types string:string
 #### A masked pattern was here ####
                   name default.bucket5_1
+                  numFiles 0
+                  numRows 0
+                  rawDataSize 0
                   serialization.ddl struct bucket5_1 { string key, string value}
                   serialization.format 1
                   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  totalSize 0
 #### A masked pattern was here ####
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: default.bucket5_1
@@ -132,6 +137,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count 2
                 bucket_field_name key
                 columns key,value
@@ -139,9 +145,13 @@ STAGE PLANS:
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.bucket5_1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket5_1 { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucket5_1
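
Bucketed tables behave the same way: the bucket5_1 descriptor above keeps its bucket_count 2 and bucket_field_name key, and simply gains the zeroed stats. The DDL shape that yields such a descriptor (illustrative, not the commit's test file):

    CREATE TABLE bucket5_1 (key STRING, value STRING)
    CLUSTERED BY (key) INTO 2 BUCKETS;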

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/sample1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/sample1.q.out b/ql/src/test/results/clientpositive/sample1.q.out
index e188902..57e61b9 100644
--- a/ql/src/test/results/clientpositive/sample1.q.out
+++ b/ql/src/test/results/clientpositive/sample1.q.out
@@ -55,15 +55,20 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         bucket_count -1
                         columns key,value,dt,hr
                         columns.comments 
                         columns.types int:string:string:string
 #### A masked pattern was here ####
                         name default.dest1
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.dest1
@@ -140,15 +145,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,dt,hr
                 columns.comments 
                 columns.types int:string:string:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -171,15 +181,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value,dt,hr
                     columns.comments 
                     columns.types int:string:string:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -195,30 +210,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value,dt,hr
               columns.comments 
               columns.types int:string:string:string
 #### A masked pattern was here ####
               name default.dest1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,dt,hr
                 columns.comments 
                 columns.types int:string:string:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -240,15 +265,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value,dt,hr
                     columns.comments 
                     columns.types int:string:string:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -264,30 +294,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value,dt,hr
               columns.comments 
               columns.types int:string:string:string
 #### A masked pattern was here ####
               name default.dest1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,dt,hr
                 columns.comments 
                 columns.types int:string:string:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/sample2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/sample2.q.out b/ql/src/test/results/clientpositive/sample2.q.out
index 9653c41..92f0d5a 100644
--- a/ql/src/test/results/clientpositive/sample2.q.out
+++ b/ql/src/test/results/clientpositive/sample2.q.out
@@ -56,15 +56,20 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         bucket_count -1
                         columns key,value
                         columns.comments 
                         columns.types int:string
 #### A masked pattern was here ####
                         name default.dest1
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct dest1 { i32 key, string value}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.dest1
@@ -141,15 +146,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -172,15 +182,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value
                     columns.comments 
                     columns.types int:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -196,30 +211,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value
               columns.comments 
               columns.types int:string
 #### A masked pattern was here ####
               name default.dest1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -241,15 +266,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value
                     columns.comments 
                     columns.types int:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -265,30 +295,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value
               columns.comments 
               columns.types int:string
 #### A masked pattern was here ####
               name default.dest1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/sample4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/sample4.q.out b/ql/src/test/results/clientpositive/sample4.q.out
index a9730b1..b4e58c5 100644
--- a/ql/src/test/results/clientpositive/sample4.q.out
+++ b/ql/src/test/results/clientpositive/sample4.q.out
@@ -56,15 +56,20 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         bucket_count -1
                         columns key,value
                         columns.comments 
                         columns.types int:string
 #### A masked pattern was here ####
                         name default.dest1
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct dest1 { i32 key, string value}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.dest1
@@ -141,15 +146,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -172,15 +182,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value
                     columns.comments 
                     columns.types int:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -196,30 +211,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value
               columns.comments 
               columns.types int:string
 #### A masked pattern was here ####
               name default.dest1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -241,15 +266,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value
                     columns.comments 
                     columns.types int:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -265,30 +295,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value
               columns.comments 
               columns.types int:string
 #### A masked pattern was here ####
               name default.dest1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/sample5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/sample5.q.out b/ql/src/test/results/clientpositive/sample5.q.out
index d5ff403..c786f21 100644
--- a/ql/src/test/results/clientpositive/sample5.q.out
+++ b/ql/src/test/results/clientpositive/sample5.q.out
@@ -57,15 +57,20 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         bucket_count -1
                         columns key,value
                         columns.comments 
                         columns.types int:string
 #### A masked pattern was here ####
                         name default.dest1
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct dest1 { i32 key, string value}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.dest1
@@ -142,15 +147,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -173,15 +183,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value
                     columns.comments 
                     columns.types int:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -197,30 +212,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value
               columns.comments 
               columns.types int:string
 #### A masked pattern was here ####
               name default.dest1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -242,15 +267,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value
                     columns.comments 
                     columns.types int:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -266,30 +296,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value
               columns.comments 
               columns.types int:string
 #### A masked pattern was here ####
               name default.dest1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/sample6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/sample6.q.out b/ql/src/test/results/clientpositive/sample6.q.out
index ea71ad6..519647f 100644
--- a/ql/src/test/results/clientpositive/sample6.q.out
+++ b/ql/src/test/results/clientpositive/sample6.q.out
@@ -54,15 +54,20 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         bucket_count -1
                         columns key,value
                         columns.comments 
                         columns.types int:string
 #### A masked pattern was here ####
                         name default.dest1
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct dest1 { i32 key, string value}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.dest1
@@ -139,15 +144,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -170,15 +180,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value
                     columns.comments 
                     columns.types int:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -194,30 +209,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value
               columns.comments 
               columns.types int:string
 #### A masked pattern was here ####
               name default.dest1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -239,15 +264,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value
                     columns.comments 
                     columns.types int:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -263,30 +293,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value
               columns.comments 
               columns.types int:string
 #### A masked pattern was here ####
               name default.dest1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/sample7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/sample7.q.out b/ql/src/test/results/clientpositive/sample7.q.out
index 94618e5..2352cdc 100644
--- a/ql/src/test/results/clientpositive/sample7.q.out
+++ b/ql/src/test/results/clientpositive/sample7.q.out
@@ -55,15 +55,20 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         bucket_count -1
                         columns key,value
                         columns.comments 
                         columns.types int:string
 #### A masked pattern was here ####
                         name default.dest1
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct dest1 { i32 key, string value}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.dest1
@@ -140,15 +145,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -171,15 +181,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value
                     columns.comments 
                     columns.types int:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -195,30 +210,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value
               columns.comments 
               columns.types int:string
 #### A masked pattern was here ####
               name default.dest1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
@@ -240,15 +265,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value
                     columns.comments 
                     columns.types int:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -264,30 +294,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value
               columns.comments 
               columns.types int:string
 #### A masked pattern was here ####
               name default.dest1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest1 { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1


[02/21] hive git commit: HIVE-13341: Stats state is not captured correctly: differentiate load table and create table (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

Posted by px...@apache.org.
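
The schema_evol diffs in this message show the DESCRIBE FORMATTED side of the same change: CREATE TABLE now seeds Table Parameters with COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} (escaped as \"...\" in the .q.out files) and the four basic counters at 0, where previously the section was empty. A quick way to observe this, assuming a patched build (table and column names below are illustrative, not taken from the tests):

    CREATE TABLE t_evol (insert_num INT, a INT, b STRING);
    DESCRIBE FORMATTED t_evol;
    -- Table Parameters should now include:
    --   COLUMN_STATS_ACCURATE  {"BASIC_STATS":"true"}
    --   numFiles 0, numRows 0, rawDataSize 0, totalSize 0
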
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/schema_evol_text_nonvec_mapwork_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/schema_evol_text_nonvec_mapwork_table.q.out b/ql/src/test/results/clientpositive/tez/schema_evol_text_nonvec_mapwork_table.q.out
index 14abca9..f8f413c 100644
--- a/ql/src/test/results/clientpositive/tez/schema_evol_text_nonvec_mapwork_table.q.out
+++ b/ql/src/test/results/clientpositive/tez/schema_evol_text_nonvec_mapwork_table.q.out
@@ -48,6 +48,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -184,13 +189,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=11)
+        Select Operator [SEL_3] (rows=4 width=12)
           Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=11)
+            Select Operator [SEL_1] (rows=4 width=12)
               Output:["_col0","_col1","_col2"]
-              TableScan [TS_0] (rows=10 width=11)
+              TableScan [TS_0] (rows=4 width=12)
                 default@table_add_int_permute_select,table_add_int_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right
@@ -294,6 +299,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -433,13 +443,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=15)
+        Select Operator [SEL_3] (rows=4 width=12)
           Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=15)
+            Select Operator [SEL_1] (rows=4 width=12)
               Output:["_col0","_col1","_col2"]
-              TableScan [TS_0] (rows=10 width=15)
+              TableScan [TS_0] (rows=4 width=12)
                 default@table_add_int_string_permute_select,table_add_int_string_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right
@@ -697,13 +707,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=55)
+        Select Operator [SEL_3] (rows=4 width=79)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=55)
+            Select Operator [SEL_1] (rows=4 width=79)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=55)
+              TableScan [TS_0] (rows=4 width=79)
                 default@table_change_string_group_double,table_change_string_group_double,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double order by insert_num
@@ -849,13 +859,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=9 width=147)
+        Select Operator [SEL_3] (rows=3 width=150)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=9 width=147)
+            Select Operator [SEL_1] (rows=3 width=150)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
-              TableScan [TS_0] (rows=9 width=147)
+              TableScan [TS_0] (rows=3 width=150)
                 default@table_change_date_group_string_group_timestamp,table_change_date_group_string_group_timestamp,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,b from table_change_date_group_string_group_timestamp order by insert_num
@@ -1017,13 +1027,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=9 width=91)
+        Select Operator [SEL_3] (rows=3 width=65)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=9 width=91)
+            Select Operator [SEL_1] (rows=3 width=65)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
-              TableScan [TS_0] (rows=9 width=91)
+              TableScan [TS_0] (rows=3 width=65)
                 default@table_change_date_group_string_group_date,table_change_date_group_string_group_date,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,b from table_change_date_group_string_group_date order by insert_num
@@ -1165,13 +1175,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=33)
+        Select Operator [SEL_3] (rows=4 width=37)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=33)
+            Select Operator [SEL_1] (rows=4 width=37)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=33)
+              TableScan [TS_0] (rows=4 width=37)
                 default@table_change_numeric_group_string_group_multi_ints_string,table_change_numeric_group_string_group_multi_ints_string,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_string order by insert_num
@@ -1306,13 +1316,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=140)
+        Select Operator [SEL_3] (rows=4 width=37)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=140)
+            Select Operator [SEL_1] (rows=4 width=37)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=140)
+              TableScan [TS_0] (rows=4 width=37)
                 default@table_change_numeric_group_string_group_multi_ints_char,table_change_numeric_group_string_group_multi_ints_char,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_char order by insert_num
@@ -1447,13 +1457,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=32)
+        Select Operator [SEL_3] (rows=4 width=37)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=32)
+            Select Operator [SEL_1] (rows=4 width=37)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=32)
+              TableScan [TS_0] (rows=4 width=37)
                 default@table_change_numeric_group_string_group_multi_ints_char_trunc,table_change_numeric_group_string_group_multi_ints_char_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_char_trunc order by insert_num
@@ -1588,13 +1598,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=33)
+        Select Operator [SEL_3] (rows=4 width=37)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=33)
+            Select Operator [SEL_1] (rows=4 width=37)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=33)
+              TableScan [TS_0] (rows=4 width=37)
                 default@table_change_numeric_group_string_group_multi_ints_varchar,table_change_numeric_group_string_group_multi_ints_varchar,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_varchar order by insert_num
@@ -1729,13 +1739,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=30)
+        Select Operator [SEL_3] (rows=4 width=37)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=30)
+            Select Operator [SEL_1] (rows=4 width=37)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=30)
+              TableScan [TS_0] (rows=4 width=37)
                 default@table_change_numeric_group_string_group_multi_ints_varchar_trunc,table_change_numeric_group_string_group_multi_ints_varchar_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_varchar_trunc order by insert_num
@@ -1868,13 +1878,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=38)
+        Select Operator [SEL_3] (rows=4 width=53)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=38)
+            Select Operator [SEL_1] (rows=4 width=53)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=38)
+              TableScan [TS_0] (rows=4 width=53)
                 default@table_change_numeric_group_string_group_floating_string,table_change_numeric_group_string_group_floating_string,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_string order by insert_num
@@ -2007,13 +2017,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=116)
+        Select Operator [SEL_3] (rows=4 width=55)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=116)
+            Select Operator [SEL_1] (rows=4 width=55)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=116)
+              TableScan [TS_0] (rows=4 width=55)
                 default@table_change_numeric_group_string_group_floating_char,table_change_numeric_group_string_group_floating_char,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_char order by insert_num
@@ -2146,13 +2156,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=39)
+        Select Operator [SEL_3] (rows=4 width=53)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=39)
+            Select Operator [SEL_1] (rows=4 width=53)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=39)
+              TableScan [TS_0] (rows=4 width=53)
                 default@table_change_numeric_group_string_group_floating_char_trunc,table_change_numeric_group_string_group_floating_char_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_char_trunc order by insert_num
@@ -2285,13 +2295,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=36)
+        Select Operator [SEL_3] (rows=4 width=50)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=36)
+            Select Operator [SEL_1] (rows=4 width=50)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=36)
+              TableScan [TS_0] (rows=4 width=50)
                 default@table_change_numeric_group_string_group_floating_varchar,table_change_numeric_group_string_group_floating_varchar,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_varchar order by insert_num
@@ -2424,13 +2434,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=36)
+        Select Operator [SEL_3] (rows=4 width=55)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=36)
+            Select Operator [SEL_1] (rows=4 width=55)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=36)
+              TableScan [TS_0] (rows=4 width=55)
                 default@table_change_numeric_group_string_group_floating_varchar_trunc,table_change_numeric_group_string_group_floating_varchar_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_varchar_trunc order by insert_num
@@ -2573,13 +2583,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=79)
+        Select Operator [SEL_3] (rows=4 width=59)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=79)
+            Select Operator [SEL_1] (rows=4 width=59)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=79)
+              TableScan [TS_0] (rows=4 width=59)
                 default@table_change_string_group_string_group_string,table_change_string_group_string_group_string,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string order by insert_num
@@ -2712,13 +2722,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=93)
+        Select Operator [SEL_3] (rows=4 width=163)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=93)
+            Select Operator [SEL_1] (rows=4 width=163)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=93)
+              TableScan [TS_0] (rows=4 width=163)
                 default@table_change_string_group_string_group_char,table_change_string_group_string_group_char,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_string_group_char order by insert_num
@@ -2851,13 +2861,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=68)
+        Select Operator [SEL_3] (rows=4 width=46)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=68)
+            Select Operator [SEL_1] (rows=4 width=46)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=68)
+              TableScan [TS_0] (rows=4 width=46)
                 default@table_change_string_group_string_group_varchar,table_change_string_group_string_group_varchar,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_string_group_varchar order by insert_num
@@ -3004,13 +3014,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=44)
+        Select Operator [SEL_3] (rows=4 width=29)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=44)
+            Select Operator [SEL_1] (rows=4 width=29)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7"]
-              TableScan [TS_0] (rows=10 width=44)
+              TableScan [TS_0] (rows=4 width=29)
                 default@table_change_lower_to_higher_numeric_group_tinyint,table_change_lower_to_higher_numeric_group_tinyint,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,b from table_change_lower_to_higher_numeric_group_tinyint order by insert_num
@@ -3147,13 +3157,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=44)
+        Select Operator [SEL_3] (rows=4 width=32)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=44)
+            Select Operator [SEL_1] (rows=4 width=32)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
-              TableScan [TS_0] (rows=10 width=44)
+              TableScan [TS_0] (rows=4 width=32)
                 default@table_change_lower_to_higher_numeric_group_smallint,table_change_lower_to_higher_numeric_group_smallint,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,b from table_change_lower_to_higher_numeric_group_smallint order by insert_num
@@ -3288,13 +3298,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=42)
+        Select Operator [SEL_3] (rows=4 width=33)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=42)
+            Select Operator [SEL_1] (rows=4 width=33)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=42)
+              TableScan [TS_0] (rows=4 width=33)
                 default@table_change_lower_to_higher_numeric_group_int,table_change_lower_to_higher_numeric_group_int,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_lower_to_higher_numeric_group_int order by insert_num
@@ -3427,13 +3437,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=41)
+        Select Operator [SEL_3] (rows=4 width=31)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=41)
+            Select Operator [SEL_1] (rows=4 width=31)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=41)
+              TableScan [TS_0] (rows=4 width=31)
                 default@table_change_lower_to_higher_numeric_group_bigint,table_change_lower_to_higher_numeric_group_bigint,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_bigint order by insert_num
@@ -3564,13 +3574,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=33)
+        Select Operator [SEL_3] (rows=4 width=59)
           Output:["_col0","_col1","_col2","_col3"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=33)
+            Select Operator [SEL_1] (rows=4 width=59)
               Output:["_col0","_col1","_col2","_col3"]
-              TableScan [TS_0] (rows=10 width=33)
+              TableScan [TS_0] (rows=4 width=59)
                 default@table_change_lower_to_higher_numeric_group_decimal,table_change_lower_to_higher_numeric_group_decimal,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","b"]
 
 PREHOOK: query: select insert_num,c1,c2,b from table_change_lower_to_higher_numeric_group_decimal order by insert_num
@@ -3699,13 +3709,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=14)
+        Select Operator [SEL_3] (rows=4 width=19)
           Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=14)
+            Select Operator [SEL_1] (rows=4 width=19)
               Output:["_col0","_col1","_col2"]
-              TableScan [TS_0] (rows=10 width=14)
+              TableScan [TS_0] (rows=4 width=19)
                 default@table_change_lower_to_higher_numeric_group_float,table_change_lower_to_higher_numeric_group_float,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","b"]
 
 PREHOOK: query: select insert_num,c1,b from table_change_lower_to_higher_numeric_group_float order by insert_num
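
The operator-level changes in the hunks above (for example, rows=10 width=11 becoming rows=4 width=12) appear to follow from the same fix: once basic stats stay accurate through CREATE TABLE and subsequent inserts, the planner's row and width estimates track the numRows and rawDataSize recorded in the metastore instead of size-based guesses. A hedged sketch, assuming a patched build with the default hive.stats.autogather=true (table name and data are illustrative):

    CREATE TABLE t_rows (insert_num INT, a INT, b STRING);
    INSERT INTO TABLE t_rows VALUES (1, 1, 'x'), (2, 2, 'y'), (3, 3, 'z'), (4, 4, 'w');
    EXPLAIN SELECT insert_num, a, b FROM t_rows ORDER BY insert_num;
    -- the TableScan annotation should report rows=4, matching the recorded
    -- numRows, rather than an estimate derived from file size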

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/schema_evol_text_vec_mapwork_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/schema_evol_text_vec_mapwork_table.q.out b/ql/src/test/results/clientpositive/tez/schema_evol_text_vec_mapwork_table.q.out
index 2b799f9..f74a754 100644
--- a/ql/src/test/results/clientpositive/tez/schema_evol_text_vec_mapwork_table.q.out
+++ b/ql/src/test/results/clientpositive/tez/schema_evol_text_vec_mapwork_table.q.out
@@ -52,6 +52,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -188,13 +193,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=11)
+        Select Operator [SEL_7] (rows=4 width=12)
           Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=11)
+            Select Operator [SEL_5] (rows=4 width=12)
               Output:["_col0","_col1","_col2"]
-              TableScan [TS_0] (rows=10 width=11)
+              TableScan [TS_0] (rows=4 width=12)
                 default@table_add_int_permute_select,table_add_int_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right
@@ -298,6 +303,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -437,13 +447,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=15)
+        Select Operator [SEL_7] (rows=4 width=12)
           Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=15)
+            Select Operator [SEL_5] (rows=4 width=12)
               Output:["_col0","_col1","_col2"]
-              TableScan [TS_0] (rows=10 width=15)
+              TableScan [TS_0] (rows=4 width=12)
                 default@table_add_int_string_permute_select,table_add_int_string_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right
@@ -701,13 +711,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=38)
+        Select Operator [SEL_7] (rows=4 width=37)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=38)
+            Select Operator [SEL_5] (rows=4 width=37)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=38)
+              TableScan [TS_0] (rows=4 width=37)
                 default@table_change_string_group_double,table_change_string_group_double,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double order by insert_num
@@ -853,13 +863,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=9 width=132)
+        Select Operator [SEL_7] (rows=3 width=150)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=9 width=132)
+            Select Operator [SEL_5] (rows=3 width=150)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
-              TableScan [TS_0] (rows=9 width=132)
+              TableScan [TS_0] (rows=3 width=150)
                 default@table_change_date_group_string_group_timestamp,table_change_date_group_string_group_timestamp,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,b from table_change_date_group_string_group_timestamp order by insert_num
@@ -1021,13 +1031,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=9 width=61)
+        Select Operator [SEL_7] (rows=3 width=65)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=9 width=61)
+            Select Operator [SEL_5] (rows=3 width=65)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
-              TableScan [TS_0] (rows=9 width=61)
+              TableScan [TS_0] (rows=3 width=65)
                 default@table_change_date_group_string_group_date,table_change_date_group_string_group_date,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,b from table_change_date_group_string_group_date order by insert_num
@@ -1169,13 +1179,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=33)
+        Select Operator [SEL_7] (rows=4 width=37)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=33)
+            Select Operator [SEL_5] (rows=4 width=37)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=33)
+              TableScan [TS_0] (rows=4 width=37)
                 default@table_change_numeric_group_string_group_multi_ints_string,table_change_numeric_group_string_group_multi_ints_string,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_string order by insert_num
@@ -1310,13 +1320,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=33)
+        Select Operator [SEL_7] (rows=4 width=37)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=33)
+            Select Operator [SEL_5] (rows=4 width=37)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=33)
+              TableScan [TS_0] (rows=4 width=37)
                 default@table_change_numeric_group_string_group_multi_ints_char,table_change_numeric_group_string_group_multi_ints_char,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_char order by insert_num
@@ -1451,13 +1461,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=30)
+        Select Operator [SEL_7] (rows=4 width=37)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=30)
+            Select Operator [SEL_5] (rows=4 width=37)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=30)
+              TableScan [TS_0] (rows=4 width=37)
                 default@table_change_numeric_group_string_group_multi_ints_char_trunc,table_change_numeric_group_string_group_multi_ints_char_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_char_trunc order by insert_num
@@ -1592,13 +1602,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=33)
+        Select Operator [SEL_7] (rows=4 width=37)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=33)
+            Select Operator [SEL_5] (rows=4 width=37)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=33)
+              TableScan [TS_0] (rows=4 width=37)
                 default@table_change_numeric_group_string_group_multi_ints_varchar,table_change_numeric_group_string_group_multi_ints_varchar,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_varchar order by insert_num
@@ -1733,13 +1743,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=30)
+        Select Operator [SEL_7] (rows=4 width=37)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=30)
+            Select Operator [SEL_5] (rows=4 width=37)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=30)
+              TableScan [TS_0] (rows=4 width=37)
                 default@table_change_numeric_group_string_group_multi_ints_varchar_trunc,table_change_numeric_group_string_group_multi_ints_varchar_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_varchar_trunc order by insert_num
@@ -1872,13 +1882,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=38)
+        Select Operator [SEL_7] (rows=4 width=53)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=38)
+            Select Operator [SEL_5] (rows=4 width=53)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=38)
+              TableScan [TS_0] (rows=4 width=53)
                 default@table_change_numeric_group_string_group_floating_string,table_change_numeric_group_string_group_floating_string,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_string order by insert_num
@@ -2011,13 +2021,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=38)
+        Select Operator [SEL_7] (rows=4 width=55)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=38)
+            Select Operator [SEL_5] (rows=4 width=55)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=38)
+              TableScan [TS_0] (rows=4 width=55)
                 default@table_change_numeric_group_string_group_floating_char,table_change_numeric_group_string_group_floating_char,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_char order by insert_num
@@ -2150,13 +2160,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=35)
+        Select Operator [SEL_7] (rows=4 width=53)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=35)
+            Select Operator [SEL_5] (rows=4 width=53)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=35)
+              TableScan [TS_0] (rows=4 width=53)
                 default@table_change_numeric_group_string_group_floating_char_trunc,table_change_numeric_group_string_group_floating_char_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_char_trunc order by insert_num
@@ -2289,13 +2299,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=36)
+        Select Operator [SEL_7] (rows=4 width=50)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=36)
+            Select Operator [SEL_5] (rows=4 width=50)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=36)
+              TableScan [TS_0] (rows=4 width=50)
                 default@table_change_numeric_group_string_group_floating_varchar,table_change_numeric_group_string_group_floating_varchar,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_varchar order by insert_num
@@ -2428,13 +2438,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=36)
+        Select Operator [SEL_7] (rows=4 width=55)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=36)
+            Select Operator [SEL_5] (rows=4 width=55)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=36)
+              TableScan [TS_0] (rows=4 width=55)
                 default@table_change_numeric_group_string_group_floating_varchar_trunc,table_change_numeric_group_string_group_floating_varchar_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_varchar_trunc order by insert_num
@@ -2577,13 +2587,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=54)
+        Select Operator [SEL_7] (rows=4 width=59)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=54)
+            Select Operator [SEL_5] (rows=4 width=59)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=54)
+              TableScan [TS_0] (rows=4 width=59)
                 default@table_change_string_group_string_group_string,table_change_string_group_string_group_string,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string order by insert_num
@@ -2716,13 +2726,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=46)
+        Select Operator [SEL_7] (rows=4 width=44)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=46)
+            Select Operator [SEL_5] (rows=4 width=44)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=46)
+              TableScan [TS_0] (rows=4 width=44)
                 default@table_change_string_group_string_group_char,table_change_string_group_string_group_char,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_string_group_char order by insert_num
@@ -2855,13 +2865,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=45)
+        Select Operator [SEL_7] (rows=4 width=46)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=45)
+            Select Operator [SEL_5] (rows=4 width=46)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=45)
+              TableScan [TS_0] (rows=4 width=46)
                 default@table_change_string_group_string_group_varchar,table_change_string_group_string_group_varchar,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_string_group_varchar order by insert_num
@@ -3008,13 +3018,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=44)
+        Select Operator [SEL_7] (rows=4 width=29)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=44)
+            Select Operator [SEL_5] (rows=4 width=29)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7"]
-              TableScan [TS_0] (rows=10 width=44)
+              TableScan [TS_0] (rows=4 width=29)
                 default@table_change_lower_to_higher_numeric_group_tinyint,table_change_lower_to_higher_numeric_group_tinyint,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,b from table_change_lower_to_higher_numeric_group_tinyint order by insert_num
@@ -3151,13 +3161,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=44)
+        Select Operator [SEL_7] (rows=4 width=32)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=44)
+            Select Operator [SEL_5] (rows=4 width=32)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
-              TableScan [TS_0] (rows=10 width=44)
+              TableScan [TS_0] (rows=4 width=32)
                 default@table_change_lower_to_higher_numeric_group_smallint,table_change_lower_to_higher_numeric_group_smallint,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,b from table_change_lower_to_higher_numeric_group_smallint order by insert_num
@@ -3292,13 +3302,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=42)
+        Select Operator [SEL_7] (rows=4 width=33)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=42)
+            Select Operator [SEL_5] (rows=4 width=33)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=42)
+              TableScan [TS_0] (rows=4 width=33)
                 default@table_change_lower_to_higher_numeric_group_int,table_change_lower_to_higher_numeric_group_int,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_lower_to_higher_numeric_group_int order by insert_num
@@ -3431,13 +3441,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=41)
+        Select Operator [SEL_7] (rows=4 width=31)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=41)
+            Select Operator [SEL_5] (rows=4 width=31)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=41)
+              TableScan [TS_0] (rows=4 width=31)
                 default@table_change_lower_to_higher_numeric_group_bigint,table_change_lower_to_higher_numeric_group_bigint,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_bigint order by insert_num
@@ -3568,13 +3578,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=33)
+        Select Operator [SEL_7] (rows=4 width=59)
           Output:["_col0","_col1","_col2","_col3"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=33)
+            Select Operator [SEL_5] (rows=4 width=59)
               Output:["_col0","_col1","_col2","_col3"]
-              TableScan [TS_0] (rows=10 width=33)
+              TableScan [TS_0] (rows=4 width=59)
                 default@table_change_lower_to_higher_numeric_group_decimal,table_change_lower_to_higher_numeric_group_decimal,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","b"]
 
 PREHOOK: query: select insert_num,c1,c2,b from table_change_lower_to_higher_numeric_group_decimal order by insert_num
@@ -3703,13 +3713,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=14)
+        Select Operator [SEL_7] (rows=4 width=19)
           Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=14)
+            Select Operator [SEL_5] (rows=4 width=19)
               Output:["_col0","_col1","_col2"]
-              TableScan [TS_0] (rows=10 width=14)
+              TableScan [TS_0] (rows=4 width=19)
                 default@table_change_lower_to_higher_numeric_group_float,table_change_lower_to_higher_numeric_group_float,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","b"]
 
 PREHOOK: query: select insert_num,c1,b from table_change_lower_to_higher_numeric_group_float order by insert_num
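
The operator-tree hunks above all follow one pattern: the old golden files carried size-derived estimates (for example rows=10 width=14) while the new ones carry the captured row count (rows=4), since with HIVE-13341 the basic-stats state recorded at write time survives into planning and the optimizer can use numRows instead of extrapolating from file size. A minimal HiveQL sketch of the effect, with an illustrative table name that is not part of the patch:

    -- hypothetical table; hive.stats.autogather is on by default,
    -- so the INSERT records numRows=4 in the metastore
    CREATE TABLE stats_demo (insert_num INT, c1 FLOAT, b STRING);
    INSERT INTO TABLE stats_demo VALUES (1, 1.0, 'a'), (2, 2.0, 'b'),
                                        (3, 3.0, 'c'), (4, 4.0, 'd');
    -- the scan estimate now matches the data instead of a size-based guess
    EXPLAIN SELECT insert_num, c1, b FROM stats_demo ORDER BY insert_num;
    -- expect a plan line like: TableScan [TS_0] (rows=4 ...)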

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/schema_evol_text_vecrow_mapwork_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/schema_evol_text_vecrow_mapwork_table.q.out b/ql/src/test/results/clientpositive/tez/schema_evol_text_vecrow_mapwork_table.q.out
index 1b46fa7..81cb662 100644
--- a/ql/src/test/results/clientpositive/tez/schema_evol_text_vecrow_mapwork_table.q.out
+++ b/ql/src/test/results/clientpositive/tez/schema_evol_text_vecrow_mapwork_table.q.out
@@ -52,6 +52,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -188,13 +193,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=11)
+        Select Operator [SEL_7] (rows=4 width=12)
           Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=11)
+            Select Operator [SEL_5] (rows=4 width=12)
               Output:["_col0","_col1","_col2"]
-              TableScan [TS_0] (rows=10 width=11)
+              TableScan [TS_0] (rows=4 width=12)
                 default@table_add_int_permute_select,table_add_int_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right
@@ -298,6 +303,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -437,13 +447,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=15)
+        Select Operator [SEL_7] (rows=4 width=12)
           Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=15)
+            Select Operator [SEL_5] (rows=4 width=12)
               Output:["_col0","_col1","_col2"]
-              TableScan [TS_0] (rows=10 width=15)
+              TableScan [TS_0] (rows=4 width=12)
                 default@table_add_int_string_permute_select,table_add_int_string_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right
@@ -701,13 +711,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=38)
+        Select Operator [SEL_7] (rows=4 width=37)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=38)
+            Select Operator [SEL_5] (rows=4 width=37)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=38)
+              TableScan [TS_0] (rows=4 width=37)
                 default@table_change_string_group_double,table_change_string_group_double,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double order by insert_num
@@ -853,13 +863,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=9 width=132)
+        Select Operator [SEL_7] (rows=3 width=150)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=9 width=132)
+            Select Operator [SEL_5] (rows=3 width=150)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
-              TableScan [TS_0] (rows=9 width=132)
+              TableScan [TS_0] (rows=3 width=150)
                 default@table_change_date_group_string_group_timestamp,table_change_date_group_string_group_timestamp,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,b from table_change_date_group_string_group_timestamp order by insert_num
@@ -1021,13 +1031,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=9 width=61)
+        Select Operator [SEL_7] (rows=3 width=65)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=9 width=61)
+            Select Operator [SEL_5] (rows=3 width=65)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
-              TableScan [TS_0] (rows=9 width=61)
+              TableScan [TS_0] (rows=3 width=65)
                 default@table_change_date_group_string_group_date,table_change_date_group_string_group_date,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,b from table_change_date_group_string_group_date order by insert_num
@@ -1169,13 +1179,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=33)
+        Select Operator [SEL_7] (rows=4 width=37)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=33)
+            Select Operator [SEL_5] (rows=4 width=37)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=33)
+              TableScan [TS_0] (rows=4 width=37)
                 default@table_change_numeric_group_string_group_multi_ints_string,table_change_numeric_group_string_group_multi_ints_string,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_string order by insert_num
@@ -1310,13 +1320,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=33)
+        Select Operator [SEL_7] (rows=4 width=37)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=33)
+            Select Operator [SEL_5] (rows=4 width=37)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=33)
+              TableScan [TS_0] (rows=4 width=37)
                 default@table_change_numeric_group_string_group_multi_ints_char,table_change_numeric_group_string_group_multi_ints_char,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_char order by insert_num
@@ -1451,13 +1461,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=30)
+        Select Operator [SEL_7] (rows=4 width=37)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=30)
+            Select Operator [SEL_5] (rows=4 width=37)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=30)
+              TableScan [TS_0] (rows=4 width=37)
                 default@table_change_numeric_group_string_group_multi_ints_char_trunc,table_change_numeric_group_string_group_multi_ints_char_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_char_trunc order by insert_num
@@ -1592,13 +1602,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=33)
+        Select Operator [SEL_7] (rows=4 width=37)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=33)
+            Select Operator [SEL_5] (rows=4 width=37)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=33)
+              TableScan [TS_0] (rows=4 width=37)
                 default@table_change_numeric_group_string_group_multi_ints_varchar,table_change_numeric_group_string_group_multi_ints_varchar,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_varchar order by insert_num
@@ -1733,13 +1743,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=30)
+        Select Operator [SEL_7] (rows=4 width=37)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=30)
+            Select Operator [SEL_5] (rows=4 width=37)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=30)
+              TableScan [TS_0] (rows=4 width=37)
                 default@table_change_numeric_group_string_group_multi_ints_varchar_trunc,table_change_numeric_group_string_group_multi_ints_varchar_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_varchar_trunc order by insert_num
@@ -1872,13 +1882,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=38)
+        Select Operator [SEL_7] (rows=4 width=53)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=38)
+            Select Operator [SEL_5] (rows=4 width=53)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=38)
+              TableScan [TS_0] (rows=4 width=53)
                 default@table_change_numeric_group_string_group_floating_string,table_change_numeric_group_string_group_floating_string,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_string order by insert_num
@@ -2011,13 +2021,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=38)
+        Select Operator [SEL_7] (rows=4 width=55)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=38)
+            Select Operator [SEL_5] (rows=4 width=55)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=38)
+              TableScan [TS_0] (rows=4 width=55)
                 default@table_change_numeric_group_string_group_floating_char,table_change_numeric_group_string_group_floating_char,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_char order by insert_num
@@ -2150,13 +2160,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=35)
+        Select Operator [SEL_7] (rows=4 width=53)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=35)
+            Select Operator [SEL_5] (rows=4 width=53)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=35)
+              TableScan [TS_0] (rows=4 width=53)
                 default@table_change_numeric_group_string_group_floating_char_trunc,table_change_numeric_group_string_group_floating_char_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_char_trunc order by insert_num
@@ -2289,13 +2299,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=36)
+        Select Operator [SEL_7] (rows=4 width=50)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=36)
+            Select Operator [SEL_5] (rows=4 width=50)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=36)
+              TableScan [TS_0] (rows=4 width=50)
                 default@table_change_numeric_group_string_group_floating_varchar,table_change_numeric_group_string_group_floating_varchar,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_varchar order by insert_num
@@ -2428,13 +2438,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=36)
+        Select Operator [SEL_7] (rows=4 width=55)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=36)
+            Select Operator [SEL_5] (rows=4 width=55)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=36)
+              TableScan [TS_0] (rows=4 width=55)
                 default@table_change_numeric_group_string_group_floating_varchar_trunc,table_change_numeric_group_string_group_floating_varchar_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_varchar_trunc order by insert_num
@@ -2577,13 +2587,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=54)
+        Select Operator [SEL_7] (rows=4 width=59)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=54)
+            Select Operator [SEL_5] (rows=4 width=59)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=54)
+              TableScan [TS_0] (rows=4 width=59)
                 default@table_change_string_group_string_group_string,table_change_string_group_string_group_string,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string order by insert_num
@@ -2716,13 +2726,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=46)
+        Select Operator [SEL_7] (rows=4 width=44)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=46)
+            Select Operator [SEL_5] (rows=4 width=44)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=46)
+              TableScan [TS_0] (rows=4 width=44)
                 default@table_change_string_group_string_group_char,table_change_string_group_string_group_char,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_string_group_char order by insert_num
@@ -2855,13 +2865,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=45)
+        Select Operator [SEL_7] (rows=4 width=46)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=45)
+            Select Operator [SEL_5] (rows=4 width=46)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=45)
+              TableScan [TS_0] (rows=4 width=46)
                 default@table_change_string_group_string_group_varchar,table_change_string_group_string_group_varchar,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_string_group_varchar order by insert_num
@@ -3008,13 +3018,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=44)
+        Select Operator [SEL_7] (rows=4 width=29)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=44)
+            Select Operator [SEL_5] (rows=4 width=29)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7"]
-              TableScan [TS_0] (rows=10 width=44)
+              TableScan [TS_0] (rows=4 width=29)
                 default@table_change_lower_to_higher_numeric_group_tinyint,table_change_lower_to_higher_numeric_group_tinyint,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,b from table_change_lower_to_higher_numeric_group_tinyint order by insert_num
@@ -3151,13 +3161,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=44)
+        Select Operator [SEL_7] (rows=4 width=32)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=44)
+            Select Operator [SEL_5] (rows=4 width=32)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
-              TableScan [TS_0] (rows=10 width=44)
+              TableScan [TS_0] (rows=4 width=32)
                 default@table_change_lower_to_higher_numeric_group_smallint,table_change_lower_to_higher_numeric_group_smallint,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,b from table_change_lower_to_higher_numeric_group_smallint order by insert_num
@@ -3292,13 +3302,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=42)
+        Select Operator [SEL_7] (rows=4 width=33)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=42)
+            Select Operator [SEL_5] (rows=4 width=33)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=42)
+              TableScan [TS_0] (rows=4 width=33)
                 default@table_change_lower_to_higher_numeric_group_int,table_change_lower_to_higher_numeric_group_int,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_lower_to_higher_numeric_group_int order by insert_num
@@ -3431,13 +3441,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=41)
+        Select Operator [SEL_7] (rows=4 width=31)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=41)
+            Select Operator [SEL_5] (rows=4 width=31)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=41)
+              TableScan [TS_0] (rows=4 width=31)
                 default@table_change_lower_to_higher_numeric_group_bigint,table_change_lower_to_higher_numeric_group_bigint,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_bigint order by insert_num
@@ -3568,13 +3578,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=33)
+        Select Operator [SEL_7] (rows=4 width=59)
           Output:["_col0","_col1","_col2","_col3"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=33)
+            Select Operator [SEL_5] (rows=4 width=59)
               Output:["_col0","_col1","_col2","_col3"]
-              TableScan [TS_0] (rows=10 width=33)
+              TableScan [TS_0] (rows=4 width=59)
                 default@table_change_lower_to_higher_numeric_group_decimal,table_change_lower_to_higher_numeric_group_decimal,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","b"]
 
 PREHOOK: query: select insert_num,c1,c2,b from table_change_lower_to_higher_numeric_group_decimal order by insert_num
@@ -3703,13 +3713,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=14)
+        Select Operator [SEL_7] (rows=4 width=19)
           Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=14)
+            Select Operator [SEL_5] (rows=4 width=19)
               Output:["_col0","_col1","_col2"]
-              TableScan [TS_0] (rows=10 width=14)
+              TableScan [TS_0] (rows=4 width=19)
                 default@table_change_lower_to_higher_numeric_group_float,table_change_lower_to_higher_numeric_group_float,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","b"]
 
 PREHOOK: query: select insert_num,c1,b from table_change_lower_to_higher_numeric_group_float order by insert_num

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/tez_join_result_complex.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/tez_join_result_complex.q.out b/ql/src/test/results/clientpositive/tez/tez_join_result_complex.q.out
index 3e3b08c..8640187 100644
--- a/ql/src/test/results/clientpositive/tez/tez_join_result_complex.q.out
+++ b/ql/src/test/results/clientpositive/tez/tez_join_result_complex.q.out
@@ -206,6 +206,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.ct_events_clean
                     numFiles 1
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct ct_events_clean { string contact_event_id, string ce_create_dt, string ce_end_dt, string contact_type, string cnctevs_cd, string contact_mode, string cntvnst_stts_cd, i32 total_transfers, list<string> ce_notes}
                     serialization.format 	
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -227,6 +229,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.ct_events_clean
                       numFiles 1
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct ct_events_clean { string contact_event_id, string ce_create_dt, string ce_end_dt, string contact_type, string cnctevs_cd, string contact_mode, string cntvnst_stts_cd, i32 total_transfers, list<string> ce_notes}
                       serialization.format 	
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -308,6 +312,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.service_request_clean
                     numFiles 1
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct service_request_clean { string cnctevn_id, string svcrqst_id, string svcrqst_crt_dts, i32 subject_seq_no, string plan_component, string cust_segment, string cnctyp_cd, string cnctmd_cd, string cnctevs_cd, string svcrtyp_cd, string svrstyp_cd, string cmpltyp_cd, string catsrsn_cd, string apealvl_cd, string cnstnty_cd, string svcrqst_asrqst_ind, string svcrqst_rtnorig_in, string svcrqst_vwasof_dt, string sum_reason_cd, string sum_reason, string crsr_master_claim_index, list<string> svcrqct_cds, string svcrqst_lupdt, timestamp crsr_lupdt, string cntevsds_lupdt, i32 ignore_me, list<string> notes}
                     serialization.format 	
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -329,6 +335,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.service_request_clean
                       numFiles 1
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct service_request_clean { string cnctevn_id, string svcrqst_id, string svcrqst_crt_dts, i32 subject_seq_no, string plan_component, string cust_segment, string cnctyp_cd, string cnctmd_cd, string cnctevs_cd, string svcrtyp_cd, string svrstyp_cd, string cmpltyp_cd, string catsrsn_cd, string apealvl_cd, string cnstnty_cd, string svcrqst_asrqst_ind, string svcrqst_rtnorig_in, string svcrqst_vwasof_dt, string sum_reason_cd, string sum_reason, string crsr_master_claim_index, list<string> svcrqct_cds, string svcrqst_lupdt, timestamp crsr_lupdt, string cntevsds_lupdt, i32 ignore_me, list<string> notes}
                       serialization.format 	
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1178,6 +1186,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.ct_events_clean
                     numFiles 1
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct ct_events_clean { string contact_event_id, string ce_create_dt, string ce_end_dt, string contact_type, string cnctevs_cd, string contact_mode, string cntvnst_stts_cd, i32 total_transfers, list<string> ce_notes}
                     serialization.format 	
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1199,6 +1209,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.ct_events_clean
                       numFiles 1
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct ct_events_clean { string contact_event_id, string ce_create_dt, string ce_end_dt, string contact_type, string cnctevs_cd, string contact_mode, string cntvnst_stts_cd, i32 total_transfers, list<string> ce_notes}
                       serialization.format 	
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1281,6 +1293,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.service_request_clean
                     numFiles 1
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct service_request_clean { string cnctevn_id, string svcrqst_id, string svcrqst_crt_dts, i32 subject_seq_no, string plan_component, string cust_segment, string cnctyp_cd, string cnctmd_cd, string cnctevs_cd, string svcrtyp_cd, string svrstyp_cd, string cmpltyp_cd, string catsrsn_cd, string apealvl_cd, string cnstnty_cd, string svcrqst_asrqst_ind, string svcrqst_rtnorig_in, string svcrqst_vwasof_dt, string sum_reason_cd, string sum_reason, string crsr_master_claim_index, list<string> svcrqct_cds, string svcrqst_lupdt, timestamp crsr_lupdt, string cntevsds_lupdt, i32 ignore_me, list<string> notes}
                     serialization.format 	
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1302,6 +1316,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                       name default.service_request_clean
                       numFiles 1
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct service_request_clean { string cnctevn_id, string svcrqst_id, string svcrqst_crt_dts, i32 subject_seq_no, string plan_component, string cust_segment, string cnctyp_cd, string cnctmd_cd, string cnctevs_cd, string svcrtyp_cd, string svrstyp_cd, string cmpltyp_cd, string catsrsn_cd, string apealvl_cd, string cnstnty_cd, string svcrqst_asrqst_ind, string svcrqst_rtnorig_in, string svcrqst_vwasof_dt, string sum_reason_cd, string sum_reason, string crsr_master_claim_index, list<string> svcrqct_cds, string svcrqst_lupdt, timestamp crsr_lupdt, string cntevsds_lupdt, i32 ignore_me, list<string> notes}
                       serialization.format 	
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
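
These plan-property hunks show the load-table side of the change: LOAD DATA only moves files, so Hive cannot count rows, and the patch now records numRows 0 and rawDataSize 0 alongside the real numFiles/totalSize rather than leaving the keys out, notably without adding the COLUMN_STATS_ACCURATE flag. A minimal sketch under assumed table and path names:

    CREATE TABLE load_demo (k STRING, v STRING);
    LOAD DATA LOCAL INPATH '/tmp/load_demo.txt' INTO TABLE load_demo;
    DESCRIBE FORMATTED load_demo;
    -- expect numFiles/totalSize to reflect the copied file while numRows and
    -- rawDataSize stay 0 (unknown) until stats are gathered explicitly:
    ANALYZE TABLE load_demo COMPUTE STATISTICS;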

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out b/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out
index 1365626..0a62262 100644
--- a/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out
+++ b/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out
@@ -4652,15 +4652,20 @@ STAGE PLANS:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                           properties:
+                            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                             bucket_count -1
                             columns p_mfgr,p_name,p_size,r,dr,s
                             columns.comments 
                             columns.types string:string:int:int:int:double
 #### A masked pattern was here ####
                             name default.part_4
+                            numFiles 0
+                            numRows 0
+                            rawDataSize 0
                             serialization.ddl struct part_4 { string p_mfgr, string p_name, i32 p_size, i32 r, i32 dr, double s}
                             serialization.format 1
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            totalSize 0
 #### A masked pattern was here ####
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: default.part_4
@@ -4770,15 +4775,20 @@ STAGE PLANS:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                           properties:
+                            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                             bucket_count -1
                             columns p_mfgr,p_name,p_size,s2,r,dr,cud,fv1
                             columns.comments 
                             columns.types string:string:int:int:int:int:double:int
 #### A masked pattern was here ####
                             name default.part_5
+                            numFiles 0
+                            numRows 0
+                            rawDataSize 0
                             serialization.ddl struct part_5 { string p_mfgr, string p_name, i32 p_size, i32 s2, i32 r, i32 dr, double cud, i32 fv1}
                             serialization.format 1
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            totalSize 0
 #### A masked pattern was here ####
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: default.part_5
@@ -4798,15 +4808,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns p_mfgr,p_name,p_size,r,dr,s
                 columns.comments 
                 columns.types string:string:int:int:int:double
 #### A masked pattern was here ####
                 name default.part_4
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct part_4 { string p_mfgr, string p_name, i32 p_size, i32 r, i32 dr, double s}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.part_4
@@ -4824,15 +4839,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns p_mfgr,p_name,p_size,s2,r,dr,cud,fv1
                 columns.comments 
                 columns.types string:string:int:int:int:int:double:int
 #### A masked pattern was here ####
                 name default.part_5
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct part_5 { string p_mfgr, string p_name, i32 p_size, i32 s2, i32 r, i32 dr, double cud, i32 fv1}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.part_5
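
Here the target tables part_4 and part_5 are freshly created and still empty when the plan is compiled, so their stats are exactly known: the output descriptors gain COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} with all counters at 0. This is the create-table side of the load/create distinction. A minimal sketch, with an illustrative table name:

    CREATE TABLE empty_demo (p_mfgr STRING, p_size INT);
    SHOW TBLPROPERTIES empty_demo;
    -- expect COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} plus
    -- numFiles/numRows/rawDataSize/totalSize all reported as 0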

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/truncate_column.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/truncate_column.q.out b/ql/src/test/results/clientpositive/truncate_column.q.out
index adbddfa..2efba75 100644
--- a/ql/src/test/results/clientpositive/truncate_column.q.out
+++ b/ql/src/test/results/clientpositive/truncate_column.q.out
@@ -104,6 +104,8 @@ Retention:          	0
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
 	numFiles            	1                   
+	numRows             	10                  
+	rawDataSize         	94                  
 	totalSize           	150                 
 #### A masked pattern was here ####
 	 	 
@@ -176,6 +178,8 @@ Retention:          	0
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
 	numFiles            	1                   
+	numRows             	10                  
+	rawDataSize         	94                  
 	totalSize           	75                  
 #### A masked pattern was here ####
 	 	 
@@ -238,6 +242,8 @@ Retention:          	0
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
 	numFiles            	1                   
+	numRows             	10                  
+	rawDataSize         	94                  
 	totalSize           	75                  
 #### A masked pattern was here ####
 	 	 
@@ -375,6 +381,8 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 #### A masked pattern was here ####
 	numFiles            	1                   
+	numRows             	10                  
+	rawDataSize         	94                  
 	totalSize           	150                 
 #### A masked pattern was here ####
 	 	 
@@ -438,6 +446,8 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 #### A masked pattern was here ####
 	numFiles            	1                   
+	numRows             	10                  
+	rawDataSize         	94                  
 	totalSize           	75                  
 #### A masked pattern was here ####
 	 	 
@@ -581,6 +591,8 @@ Table:              	test_tab_part
 #### A masked pattern was here ####
 Partition Parameters:	 	 
 	numFiles            	1                   
+	numRows             	10                  
+	rawDataSize         	94                  
 	totalSize           	150                 
 #### A masked pattern was here ####
 	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out b/ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out
index 39ffc6c..05ca155 100644
--- a/ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out
+++ b/ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out
@@ -88,8 +88,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.test_tab
               numFiles 2
+              numRows 500
               partition_columns part
               partition_columns.types string
+              rawDataSize 4812
               serialization.ddl struct test_tab { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
@@ -118,16 +120,16 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: test_tab
-          Statistics: Num rows: 17 Data size: 1761 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 500 Data size: 4812 Basic stats: COMPLETE Column stats: NONE
           GatherStats: false
           Filter Operator
             isSamplingPred: false
             predicate: (key = '484') (type: boolean)
-            Statistics: Num rows: 8 Data size: 828 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: '484' (type: string), value (type: string), '1' (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 8 Data size: 828 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
               ListSink
 
 PREHOOK: query: SELECT * FROM test_tab WHERE part = '1' AND key = '484'
@@ -166,8 +168,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.test_tab
               numFiles 2
+              numRows 500
               partition_columns part
               partition_columns.types string
+              rawDataSize 4812
               serialization.ddl struct test_tab { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
@@ -196,16 +200,16 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: test_tab
-          Statistics: Num rows: 17 Data size: 1761 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 500 Data size: 4812 Basic stats: COMPLETE Column stats: NONE
           GatherStats: false
           Filter Operator
             isSamplingPred: false
             predicate: (key = '0') (type: boolean)
-            Statistics: Num rows: 8 Data size: 828 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: '0' (type: string), value (type: string), '1' (type: string)
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 8 Data size: 828 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
               ListSink
 
 PREHOOK: query: SELECT * FROM test_tab WHERE part = '1' AND key = '0'
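
The Statistics rewrites in this file follow directly from the new partition-level counters: the scan now starts from the recorded numRows 500 / rawDataSize 4812 instead of the old file-size guess of 17 rows, and with Column stats: NONE an equality predicate is estimated at half the input, hence 250 rows and 2406 bytes after the filter. A quick way to inspect the annotated estimates, reusing the test's own table:

    -- hypothetical check; EXPLAIN EXTENDED prints the Statistics lines
    EXPLAIN EXTENDED SELECT * FROM test_tab WHERE part = '1' AND key = '484';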


[05/21] hive git commit: HIVE-13341: Stats state is not captured correctly: differentiate load table and create table (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

Posted by px...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/pcr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/pcr.q.out b/ql/src/test/results/clientpositive/spark/pcr.q.out
index 3dd2147..21b7519 100644
--- a/ql/src/test/results/clientpositive/spark/pcr.q.out
+++ b/ql/src/test/results/clientpositive/spark/pcr.q.out
@@ -3460,15 +3460,20 @@ STAGE PLANS:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                           properties:
+                            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                             bucket_count -1
                             columns key,value
                             columns.comments 
                             columns.types int:string
 #### A masked pattern was here ####
                             name default.pcr_t2
+                            numFiles 0
+                            numRows 0
+                            rawDataSize 0
                             serialization.ddl struct pcr_t2 { i32 key, string value}
                             serialization.format 1
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            totalSize 0
 #### A masked pattern was here ####
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: default.pcr_t2
@@ -3490,15 +3495,20 @@ STAGE PLANS:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                           properties:
+                            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                             bucket_count -1
                             columns key,value
                             columns.comments 
                             columns.types int:string
 #### A masked pattern was here ####
                             name default.pcr_t3
+                            numFiles 0
+                            numRows 0
+                            rawDataSize 0
                             serialization.ddl struct pcr_t3 { i32 key, string value}
                             serialization.format 1
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            totalSize 0
 #### A masked pattern was here ####
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: default.pcr_t3
@@ -3565,15 +3575,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.pcr_t2
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct pcr_t2 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.pcr_t2
@@ -3591,15 +3606,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.pcr_t3
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct pcr_t3 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.pcr_t3

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/reduce_deduplicate.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/reduce_deduplicate.q.out b/ql/src/test/results/clientpositive/spark/reduce_deduplicate.q.out
index 3f5f649..b20e8fe 100644
--- a/ql/src/test/results/clientpositive/spark/reduce_deduplicate.q.out
+++ b/ql/src/test/results/clientpositive/spark/reduce_deduplicate.q.out
@@ -112,6 +112,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         bucket_count 2
                         bucket_field_name key
                         columns key,value
@@ -119,9 +120,13 @@ STAGE PLANS:
                         columns.types string:string
 #### A masked pattern was here ####
                         name default.bucket5_1
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct bucket5_1 { string key, string value}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.bucket5_1
@@ -138,6 +143,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count 2
                 bucket_field_name key
                 columns key,value
@@ -145,9 +151,13 @@ STAGE PLANS:
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.bucket5_1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucket5_1 { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucket5_1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/sample1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/sample1.q.out b/ql/src/test/results/clientpositive/spark/sample1.q.out
index 15b1016..eb9d5f6 100644
--- a/ql/src/test/results/clientpositive/spark/sample1.q.out
+++ b/ql/src/test/results/clientpositive/spark/sample1.q.out
@@ -53,15 +53,20 @@ STAGE PLANS:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                             properties:
+                              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                               bucket_count -1
                               columns key,value,dt,hr
                               columns.comments 
                               columns.types int:string:string:string
 #### A masked pattern was here ####
                               name default.dest1
+                              numFiles 0
+                              numRows 0
+                              rawDataSize 0
                               serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
                               serialization.format 1
                               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              totalSize 0
 #### A masked pattern was here ####
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             name: default.dest1
@@ -129,15 +134,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,dt,hr
                 columns.comments 
                 columns.types int:string:string:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/sample2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/sample2.q.out b/ql/src/test/results/clientpositive/spark/sample2.q.out
index 28108bb..b13f818 100644
--- a/ql/src/test/results/clientpositive/spark/sample2.q.out
+++ b/ql/src/test/results/clientpositive/spark/sample2.q.out
@@ -54,15 +54,20 @@ STAGE PLANS:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                             properties:
+                              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                               bucket_count -1
                               columns key,value
                               columns.comments 
                               columns.types int:string
 #### A masked pattern was here ####
                               name default.dest1
+                              numFiles 0
+                              numRows 0
+                              rawDataSize 0
                               serialization.ddl struct dest1 { i32 key, string value}
                               serialization.format 1
                               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              totalSize 0
 #### A masked pattern was here ####
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             name: default.dest1
@@ -130,15 +135,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/sample4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/sample4.q.out b/ql/src/test/results/clientpositive/spark/sample4.q.out
index 25f300d..69066c1 100644
--- a/ql/src/test/results/clientpositive/spark/sample4.q.out
+++ b/ql/src/test/results/clientpositive/spark/sample4.q.out
@@ -54,15 +54,20 @@ STAGE PLANS:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                             properties:
+                              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                               bucket_count -1
                               columns key,value
                               columns.comments 
                               columns.types int:string
 #### A masked pattern was here ####
                               name default.dest1
+                              numFiles 0
+                              numRows 0
+                              rawDataSize 0
                               serialization.ddl struct dest1 { i32 key, string value}
                               serialization.format 1
                               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              totalSize 0
 #### A masked pattern was here ####
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             name: default.dest1
@@ -130,15 +135,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/sample5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/sample5.q.out b/ql/src/test/results/clientpositive/spark/sample5.q.out
index 8aebfe5..819939c 100644
--- a/ql/src/test/results/clientpositive/spark/sample5.q.out
+++ b/ql/src/test/results/clientpositive/spark/sample5.q.out
@@ -55,15 +55,20 @@ STAGE PLANS:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                             properties:
+                              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                               bucket_count -1
                               columns key,value
                               columns.comments 
                               columns.types int:string
 #### A masked pattern was here ####
                               name default.dest1
+                              numFiles 0
+                              numRows 0
+                              rawDataSize 0
                               serialization.ddl struct dest1 { i32 key, string value}
                               serialization.format 1
                               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              totalSize 0
 #### A masked pattern was here ####
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             name: default.dest1
@@ -131,15 +136,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/sample6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/sample6.q.out b/ql/src/test/results/clientpositive/spark/sample6.q.out
index 2b49712..bf06004 100644
--- a/ql/src/test/results/clientpositive/spark/sample6.q.out
+++ b/ql/src/test/results/clientpositive/spark/sample6.q.out
@@ -52,15 +52,20 @@ STAGE PLANS:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                             properties:
+                              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                               bucket_count -1
                               columns key,value
                               columns.comments 
                               columns.types int:string
 #### A masked pattern was here ####
                               name default.dest1
+                              numFiles 0
+                              numRows 0
+                              rawDataSize 0
                               serialization.ddl struct dest1 { i32 key, string value}
                               serialization.format 1
                               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              totalSize 0
 #### A masked pattern was here ####
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             name: default.dest1
@@ -128,15 +133,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/sample7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/sample7.q.out b/ql/src/test/results/clientpositive/spark/sample7.q.out
index 6c17ab1..a821c76 100644
--- a/ql/src/test/results/clientpositive/spark/sample7.q.out
+++ b/ql/src/test/results/clientpositive/spark/sample7.q.out
@@ -53,15 +53,20 @@ STAGE PLANS:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                             properties:
+                              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                               bucket_count -1
                               columns key,value
                               columns.comments 
                               columns.types int:string
 #### A masked pattern was here ####
                               name default.dest1
+                              numFiles 0
+                              numRows 0
+                              rawDataSize 0
                               serialization.ddl struct dest1 { i32 key, string value}
                               serialization.format 1
                               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              totalSize 0
 #### A masked pattern was here ####
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             name: default.dest1
@@ -129,15 +134,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/stats0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/stats0.q.out b/ql/src/test/results/clientpositive/spark/stats0.q.out
index e741320..0b14e21 100644
--- a/ql/src/test/results/clientpositive/spark/stats0.q.out
+++ b/ql/src/test/results/clientpositive/spark/stats0.q.out
@@ -45,15 +45,20 @@ STAGE PLANS:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                           properties:
+                            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                             bucket_count -1
                             columns key,value
                             columns.comments 
                             columns.types string:string
 #### A masked pattern was here ####
                             name default.stats_non_partitioned
+                            numFiles 0
+                            numRows 0
+                            rawDataSize 0
                             serialization.ddl struct stats_non_partitioned { string key, string value}
                             serialization.format 1
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            totalSize 0
 #### A masked pattern was here ####
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: default.stats_non_partitioned
@@ -119,15 +124,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.stats_non_partitioned
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct stats_non_partitioned { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.stats_non_partitioned
@@ -1349,15 +1359,20 @@ STAGE PLANS:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                           properties:
+                            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                             bucket_count -1
                             columns key,value
                             columns.comments 
                             columns.types string:string
 #### A masked pattern was here ####
                             name default.stats_non_partitioned
+                            numFiles 0
+                            numRows 0
+                            rawDataSize 0
                             serialization.ddl struct stats_non_partitioned { string key, string value}
                             serialization.format 1
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            totalSize 0
 #### A masked pattern was here ####
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: default.stats_non_partitioned
@@ -1423,15 +1438,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.stats_non_partitioned
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct stats_non_partitioned { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.stats_non_partitioned

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/stats1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/stats1.q.out b/ql/src/test/results/clientpositive/spark/stats1.q.out
index 588a33d..2f29d27 100644
--- a/ql/src/test/results/clientpositive/spark/stats1.q.out
+++ b/ql/src/test/results/clientpositive/spark/stats1.q.out
@@ -224,6 +224,8 @@ Retention:          	0
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
 	numFiles            	3                   
+	numRows             	26                  
+	rawDataSize         	199                 
 	totalSize           	1583                
 #### A masked pattern was here ####
 	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/stats18.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/stats18.q.out b/ql/src/test/results/clientpositive/spark/stats18.q.out
index 6971e44..3ad9679 100644
--- a/ql/src/test/results/clientpositive/spark/stats18.q.out
+++ b/ql/src/test/results/clientpositive/spark/stats18.q.out
@@ -94,6 +94,8 @@ Table:              	stats_part
 #### A masked pattern was here ####
 Partition Parameters:	 	 
 	numFiles            	2                   
+	numRows             	500                 
+	rawDataSize         	5312                
 	totalSize           	7170                
 #### A masked pattern was here ####
 	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/stats20.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/stats20.q.out b/ql/src/test/results/clientpositive/spark/stats20.q.out
index d7e52b4..a824bc9 100644
--- a/ql/src/test/results/clientpositive/spark/stats20.q.out
+++ b/ql/src/test/results/clientpositive/spark/stats20.q.out
@@ -55,6 +55,45 @@ Bucket Columns:     	[]
 Sort Columns:       	[]                  	 
 Storage Desc Params:	 	 
 	serialization.format	1                   
+PREHOOK: query: describe formatted stats_partitioned partition (ds='1')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@stats_partitioned
+POSTHOOK: query: describe formatted stats_partitioned partition (ds='1')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@stats_partitioned
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[1]                 	 
+Database:           	default             	 
+Table:              	stats_partitioned   	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	1                   
+	numRows             	500                 
+	rawDataSize         	5312                
+	totalSize           	5812                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
 PREHOOK: query: insert overwrite table stats_partitioned partition (ds='1')
 select * from src
 PREHOOK: type: QUERY
@@ -104,3 +143,42 @@ Bucket Columns:     	[]
 Sort Columns:       	[]                  	 
 Storage Desc Params:	 	 
 	serialization.format	1                   
+PREHOOK: query: describe formatted stats_partitioned partition (ds='1')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@stats_partitioned
+POSTHOOK: query: describe formatted stats_partitioned partition (ds='1')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@stats_partitioned
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[1]                 	 
+Database:           	default             	 
+Table:              	stats_partitioned   	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	1                   
+	numRows             	500                 
+	rawDataSize         	0                   
+	totalSize           	5812                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   

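In stats20.q.out the two new partition descriptions differ only in
rawDataSize (5312 in the first block, 0 in the second) while numRows stays
at 500, which points at a test that disables raw-data-size collection
before the second insert. A hedged sketch of that second step (assuming
the test flips hive.stats.collect.rawdatasize, which is what this output
suggests):

    -- With raw-data-size collection off, autogathered stats still
    -- record the row count but leave rawDataSize at 0.
    SET hive.stats.autogather=true;
    SET hive.stats.collect.rawdatasize=false;
    INSERT OVERWRITE TABLE stats_partitioned PARTITION (ds='1')
    SELECT * FROM src;
    DESCRIBE FORMATTED stats_partitioned PARTITION (ds='1');
    --   numRows 500, rawDataSize 0, totalSize 5812
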
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/stats3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/stats3.q.out b/ql/src/test/results/clientpositive/spark/stats3.q.out
index 4dbda94..7db4fa0 100644
--- a/ql/src/test/results/clientpositive/spark/stats3.q.out
+++ b/ql/src/test/results/clientpositive/spark/stats3.q.out
@@ -34,15 +34,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns col1
                 columns.comments 
                 columns.types string
 #### A masked pattern was here ####
                 name default.hive_test_src
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct hive_test_src { string col1}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.hive_test_src
@@ -76,6 +81,8 @@ Retention:          	0
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
 	numFiles            	1                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	11                  
 #### A masked pattern was here ####
 	 	 

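The stats3.q.out hunks above show the LOAD side of the differentiation:
at plan time the freshly created hive_test_src still carries accurate
zeroed stats, but after the file is loaded, DESCRIBE reports numFiles 1
and totalSize 11 while numRows and rawDataSize stay at 0, and the visible
hunk adds no COLUMN_STATS_ACCURATE entry. A sketch (the input path is a
hypothetical placeholder):

    -- LOAD DATA updates the file-level counters only; row-level
    -- stats are not claimed to be accurate for loaded files.
    CREATE TABLE hive_test_src (col1 STRING);
    LOAD DATA LOCAL INPATH '/tmp/test.txt' INTO TABLE hive_test_src;
    DESCRIBE FORMATTED hive_test_src;
    --   numFiles 1, totalSize <size of the file>,
    --   numRows 0, rawDataSize 0
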
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out b/ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out
index 7b56585..f4b9584 100644
--- a/ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out
@@ -4592,15 +4592,20 @@ STAGE PLANS:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                           properties:
+                            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                             bucket_count -1
                             columns p_mfgr,p_name,p_size,r,dr,s
                             columns.comments 
                             columns.types string:string:int:int:int:double
 #### A masked pattern was here ####
                             name default.part_4
+                            numFiles 0
+                            numRows 0
+                            rawDataSize 0
                             serialization.ddl struct part_4 { string p_mfgr, string p_name, i32 p_size, i32 r, i32 dr, double s}
                             serialization.format 1
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            totalSize 0
 #### A masked pattern was here ####
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: default.part_4
@@ -4710,15 +4715,20 @@ STAGE PLANS:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                           properties:
+                            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                             bucket_count -1
                             columns p_mfgr,p_name,p_size,s2,r,dr,cud,fv1
                             columns.comments 
                             columns.types string:string:int:int:int:int:double:int
 #### A masked pattern was here ####
                             name default.part_5
+                            numFiles 0
+                            numRows 0
+                            rawDataSize 0
                             serialization.ddl struct part_5 { string p_mfgr, string p_name, i32 p_size, i32 s2, i32 r, i32 dr, double cud, i32 fv1}
                             serialization.format 1
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            totalSize 0
 #### A masked pattern was here ####
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: default.part_5
@@ -4791,15 +4801,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns p_mfgr,p_name,p_size,r,dr,s
                 columns.comments 
                 columns.types string:string:int:int:int:double
 #### A masked pattern was here ####
                 name default.part_4
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct part_4 { string p_mfgr, string p_name, i32 p_size, i32 r, i32 dr, double s}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.part_4
@@ -4817,15 +4832,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns p_mfgr,p_name,p_size,s2,r,dr,cud,fv1
                 columns.comments 
                 columns.types string:string:int:int:int:int:double:int
 #### A masked pattern was here ####
                 name default.part_5
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct part_5 { string p_mfgr, string p_name, i32 p_size, i32 s2, i32 r, i32 dr, double cud, i32 fv1}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.part_5

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/stats0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/stats0.q.out b/ql/src/test/results/clientpositive/stats0.q.out
index 90a9a70..bbe38c1 100644
--- a/ql/src/test/results/clientpositive/stats0.q.out
+++ b/ql/src/test/results/clientpositive/stats0.q.out
@@ -42,15 +42,20 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                       bucket_count -1
                       columns key,value
                       columns.comments 
                       columns.types string:string
 #### A masked pattern was here ####
                       name default.stats_non_partitioned
+                      numFiles 0
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct stats_non_partitioned { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 0
 #### A masked pattern was here ####
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.stats_non_partitioned
@@ -116,15 +121,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.stats_non_partitioned
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct stats_non_partitioned { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.stats_non_partitioned
@@ -1345,15 +1355,20 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                       bucket_count -1
                       columns key,value
                       columns.comments 
                       columns.types string:string
 #### A masked pattern was here ####
                       name default.stats_non_partitioned
+                      numFiles 0
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct stats_non_partitioned { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 0
 #### A masked pattern was here ####
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.stats_non_partitioned
@@ -1428,15 +1443,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.stats_non_partitioned
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct stats_non_partitioned { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.stats_non_partitioned
@@ -1459,15 +1479,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value
                     columns.comments 
                     columns.types string:string
 #### A masked pattern was here ####
                     name default.stats_non_partitioned
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct stats_non_partitioned { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.stats_non_partitioned
@@ -1483,30 +1508,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value
               columns.comments 
               columns.types string:string
 #### A masked pattern was here ####
               name default.stats_non_partitioned
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct stats_non_partitioned { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.stats_non_partitioned
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct stats_non_partitioned { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.stats_non_partitioned
@@ -1528,15 +1563,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value
                     columns.comments 
                     columns.types string:string
 #### A masked pattern was here ####
                     name default.stats_non_partitioned
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct stats_non_partitioned { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.stats_non_partitioned
@@ -1552,30 +1592,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value
               columns.comments 
               columns.types string:string
 #### A masked pattern was here ####
               name default.stats_non_partitioned
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct stats_non_partitioned { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.stats_non_partitioned
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct stats_non_partitioned { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.stats_non_partitioned

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/stats1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/stats1.q.out b/ql/src/test/results/clientpositive/stats1.q.out
index 72c53e3..ac076ec 100644
--- a/ql/src/test/results/clientpositive/stats1.q.out
+++ b/ql/src/test/results/clientpositive/stats1.q.out
@@ -232,6 +232,8 @@ Retention:          	0
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
 	numFiles            	3                   
+	numRows             	26                  
+	rawDataSize         	199                 
 	totalSize           	1583                
 #### A masked pattern was here ####
 	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/stats11.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/stats11.q.out b/ql/src/test/results/clientpositive/stats11.q.out
index 8ca5b27..9395773 100644
--- a/ql/src/test/results/clientpositive/stats11.q.out
+++ b/ql/src/test/results/clientpositive/stats11.q.out
@@ -88,6 +88,8 @@ Table:              	srcbucket_mapjoin_part
 #### A masked pattern was here ####
 Partition Parameters:	 	 
 	numFiles            	1                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	1358                
 #### A masked pattern was here ####
 	 	 
@@ -132,6 +134,8 @@ Table:              	srcbucket_mapjoin_part
 #### A masked pattern was here ####
 Partition Parameters:	 	 
 	numFiles            	2                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	2750                
 #### A masked pattern was here ####
 	 	 
@@ -176,6 +180,8 @@ Table:              	srcbucket_mapjoin_part
 #### A masked pattern was here ####
 Partition Parameters:	 	 
 	numFiles            	3                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	4200                
 #### A masked pattern was here ####
 	 	 
@@ -220,6 +226,8 @@ Table:              	srcbucket_mapjoin_part
 #### A masked pattern was here ####
 Partition Parameters:	 	 
 	numFiles            	4                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	5812                
 #### A masked pattern was here ####
 	 	 
@@ -327,8 +335,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.srcbucket_mapjoin_part
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -414,15 +424,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,value1,value2
                           columns.comments 
                           columns.types string:string:string
 #### A masked pattern was here ####
                           name default.bucketmapjoin_tmp_result
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.bucketmapjoin_tmp_result
@@ -448,6 +463,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin
               numFiles 2
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -466,6 +483,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.srcbucket_mapjoin
                 numFiles 2
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -495,15 +514,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -526,15 +550,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value1,value2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.bucketmapjoin_tmp_result
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.bucketmapjoin_tmp_result
@@ -550,30 +579,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value1,value2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.bucketmapjoin_tmp_result
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -595,15 +634,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value1,value2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.bucketmapjoin_tmp_result
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.bucketmapjoin_tmp_result
@@ -619,30 +663,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value1,value2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.bucketmapjoin_tmp_result
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value1,value2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.bucketmapjoin_tmp_result
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.bucketmapjoin_tmp_result
@@ -887,8 +941,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.srcbucket_mapjoin_part
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/stats18.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/stats18.q.out b/ql/src/test/results/clientpositive/stats18.q.out
index 6971e44..3ad9679 100644
--- a/ql/src/test/results/clientpositive/stats18.q.out
+++ b/ql/src/test/results/clientpositive/stats18.q.out
@@ -94,6 +94,8 @@ Table:              	stats_part
 #### A masked pattern was here ####
 Partition Parameters:	 	 
 	numFiles            	2                   
+	numRows             	500                 
+	rawDataSize         	5312                
 	totalSize           	7170                
 #### A masked pattern was here ####
 	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/stats20.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/stats20.q.out b/ql/src/test/results/clientpositive/stats20.q.out
index d7e52b4..a824bc9 100644
--- a/ql/src/test/results/clientpositive/stats20.q.out
+++ b/ql/src/test/results/clientpositive/stats20.q.out
@@ -55,6 +55,45 @@ Bucket Columns:     	[]
 Sort Columns:       	[]                  	 
 Storage Desc Params:	 	 
 	serialization.format	1                   
+PREHOOK: query: describe formatted stats_partitioned partition (ds='1')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@stats_partitioned
+POSTHOOK: query: describe formatted stats_partitioned partition (ds='1')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@stats_partitioned
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[1]                 	 
+Database:           	default             	 
+Table:              	stats_partitioned   	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	1                   
+	numRows             	500                 
+	rawDataSize         	5312                
+	totalSize           	5812                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
 PREHOOK: query: insert overwrite table stats_partitioned partition (ds='1')
 select * from src
 PREHOOK: type: QUERY
@@ -104,3 +143,42 @@ Bucket Columns:     	[]
 Sort Columns:       	[]                  	 
 Storage Desc Params:	 	 
 	serialization.format	1                   
+PREHOOK: query: describe formatted stats_partitioned partition (ds='1')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@stats_partitioned
+POSTHOOK: query: describe formatted stats_partitioned partition (ds='1')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@stats_partitioned
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[1]                 	 
+Database:           	default             	 
+Table:              	stats_partitioned   	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	1                   
+	numRows             	500                 
+	rawDataSize         	0                   
+	totalSize           	5812                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/stats3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/stats3.q.out b/ql/src/test/results/clientpositive/stats3.q.out
index 4dbda94..7db4fa0 100644
--- a/ql/src/test/results/clientpositive/stats3.q.out
+++ b/ql/src/test/results/clientpositive/stats3.q.out
@@ -34,15 +34,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns col1
                 columns.comments 
                 columns.types string
 #### A masked pattern was here ####
                 name default.hive_test_src
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct hive_test_src { string col1}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.hive_test_src
@@ -76,6 +81,8 @@ Retention:          	0
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
 	numFiles            	1                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	11                  
 #### A masked pattern was here ####
 	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_1.q.out b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_1.q.out
index 892539d..dffe096 100644
--- a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_1.q.out
+++ b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_1.q.out
@@ -163,8 +163,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -251,8 +253,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -296,8 +300,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -456,8 +462,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -501,8 +509,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -574,8 +584,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -733,8 +745,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -778,8 +792,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -851,8 +867,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_11.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_11.q.out b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_11.q.out
index 954fd96..8a553d7 100644
--- a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_11.q.out
+++ b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_11.q.out
@@ -159,8 +159,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -246,8 +248,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -290,8 +294,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -441,8 +447,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -528,8 +536,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -572,8 +582,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -713,8 +725,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -796,8 +810,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -840,8 +856,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -981,8 +999,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1067,8 +1087,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1111,8 +1133,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1179,8 +1203,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1223,8 +1249,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_12.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_12.q.out b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_12.q.out
index 578880d..dfcf7ea 100644
--- a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_12.q.out
+++ b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_12.q.out
@@ -197,8 +197,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -269,8 +271,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_medium
                     numFiles 3
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_medium { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -371,8 +375,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -416,8 +422,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -481,8 +489,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_medium
                     numFiles 3
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_medium { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe


[21/21] hive git commit: HIVE-13341: Stats state is not captured correctly: differentiate load table and create table (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

Posted by px...@apache.org.
HIVE-13341: Stats state is not captured correctly: differentiate load table and create table (Pengcheng Xiong, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/244ce09c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/244ce09c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/244ce09c

Branch: refs/heads/master
Commit: 244ce09c935050b7d30c8beb507e50a1413ae70e
Parents: 2a8e388
Author: Pengcheng Xiong <px...@apache.org>
Authored: Mon May 9 10:01:38 2016 -0700
Committer: Pengcheng Xiong <px...@apache.org>
Committed: Mon May 9 10:41:03 2016 -0700

----------------------------------------------------------------------
 .../hadoop/hive/common/StatsSetupConst.java     |  19 +-
 .../hadoop/hive/ql/history/TestHiveHistory.java |   2 +-
 .../hadoop/hive/metastore/MetaStoreUtils.java   |   7 +
 .../apache/hadoop/hive/ql/exec/MoveTask.java    |  20 +-
 .../apache/hadoop/hive/ql/exec/StatsTask.java   |  73 +-
 .../apache/hadoop/hive/ql/metadata/Hive.java    |  34 +-
 .../hive/ql/parse/DDLSemanticAnalyzer.java      |  14 +
 .../hive/ql/parse/ImportSemanticAnalyzer.java   |   2 +-
 .../hadoop/hive/ql/plan/CreateTableDesc.java    |  11 +
 .../hadoop/hive/ql/exec/TestExecDriver.java     |   2 +-
 .../clientpositive/alter_table_add_partition.q  |  13 +
 .../insert_values_orig_table_use_metadata.q     | 121 +++
 ql/src/test/queries/clientpositive/stats20.q    |   2 +
 .../clientnegative/alter_file_format.q.out      |   5 +
 .../clientnegative/unset_table_property.q.out   |   2 +
 .../clientpositive/alter_file_format.q.out      |  37 +
 .../alter_partition_clusterby_sortby.q.out      |   9 +
 .../clientpositive/alter_skewed_table.q.out     |  21 +
 .../alter_table_add_partition.q.out             | 202 ++++
 .../clientpositive/alter_table_not_sorted.q.out |   7 +
 .../clientpositive/auto_sortmerge_join_1.q.out  |  30 +
 .../clientpositive/auto_sortmerge_join_11.q.out |  32 +
 .../clientpositive/auto_sortmerge_join_12.q.out |  14 +
 .../clientpositive/auto_sortmerge_join_2.q.out  |  26 +
 .../clientpositive/auto_sortmerge_join_3.q.out  |  24 +
 .../clientpositive/auto_sortmerge_join_4.q.out  |  24 +
 .../clientpositive/auto_sortmerge_join_5.q.out  |  24 +
 .../clientpositive/auto_sortmerge_join_7.q.out  |  36 +
 .../clientpositive/auto_sortmerge_join_8.q.out  |  36 +
 .../clientpositive/binary_output_format.q.out   |  40 +
 .../test/results/clientpositive/bucket1.q.out   |  10 +
 .../test/results/clientpositive/bucket2.q.out   |  10 +
 .../test/results/clientpositive/bucket4.q.out   |  10 +
 .../test/results/clientpositive/bucket5.q.out   |  50 +
 .../results/clientpositive/bucket_many.q.out    |  10 +
 .../clientpositive/bucket_map_join_1.q.out      |   4 +
 .../clientpositive/bucket_map_join_2.q.out      |   4 +
 .../clientpositive/bucket_map_join_spark1.q.out |  22 +
 .../clientpositive/bucket_map_join_spark2.q.out |  22 +
 .../clientpositive/bucket_map_join_spark3.q.out |  22 +
 .../clientpositive/bucketcontext_1.q.out        |  10 +
 .../clientpositive/bucketcontext_2.q.out        |  10 +
 .../clientpositive/bucketcontext_3.q.out        |   8 +
 .../clientpositive/bucketcontext_4.q.out        |   8 +
 .../clientpositive/bucketcontext_5.q.out        |   8 +
 .../clientpositive/bucketcontext_6.q.out        |   8 +
 .../clientpositive/bucketcontext_7.q.out        |  12 +
 .../clientpositive/bucketcontext_8.q.out        |  12 +
 .../results/clientpositive/bucketmapjoin1.q.out |  48 +
 .../clientpositive/bucketmapjoin10.q.out        |   8 +
 .../clientpositive/bucketmapjoin11.q.out        |  16 +
 .../clientpositive/bucketmapjoin12.q.out        |   8 +
 .../results/clientpositive/bucketmapjoin2.q.out |  54 +
 .../results/clientpositive/bucketmapjoin3.q.out |  48 +
 .../results/clientpositive/bucketmapjoin4.q.out |  48 +
 .../results/clientpositive/bucketmapjoin5.q.out |  48 +
 .../results/clientpositive/bucketmapjoin7.q.out |   4 +
 .../results/clientpositive/bucketmapjoin8.q.out |   8 +
 .../results/clientpositive/bucketmapjoin9.q.out |   8 +
 .../clientpositive/bucketmapjoin_negative.q.out |  46 +
 .../bucketmapjoin_negative2.q.out               |  48 +
 .../bucketmapjoin_negative3.q.out               |  36 +
 .../columnStatsUpdateForStatsOptimizer_1.q.out  |   2 +
 ...names_with_leading_and_trailing_spaces.q.out |   5 +
 .../clientpositive/columnstats_partlvl.q.out    |   4 +
 .../clientpositive/columnstats_tbllvl.q.out     |   8 +
 .../create_alter_list_bucketing_table1.q.out    |  15 +
 .../results/clientpositive/create_like.q.out    |  12 +
 .../clientpositive/create_like_view.q.out       |   5 +
 .../clientpositive/create_skewed_table1.q.out   |  15 +
 .../clientpositive/database_location.q.out      |  10 +
 .../clientpositive/default_file_format.q.out    |  15 +
 .../describe_comment_indent.q.out               |   5 +
 .../describe_comment_nonascii.q.out             |   2 +
 .../describe_formatted_view_partitioned.q.out   |   5 +
 .../clientpositive/describe_syntax.q.out        |  10 +
 .../disable_merge_for_bucketing.q.out           |  10 +
 .../display_colstats_tbllvl.q.out               |   4 +
 .../encryption_join_unencrypted_tbl.q.out       |   4 +
 .../clientpositive/groupby_map_ppr.q.out        |  10 +
 .../groupby_map_ppr_multi_distinct.q.out        |  10 +
 .../results/clientpositive/groupby_ppr.q.out    |  10 +
 .../groupby_ppr_multi_distinct.q.out            |  10 +
 .../clientpositive/groupby_sort_1_23.q.out      | 140 +++
 .../results/clientpositive/groupby_sort_6.q.out |  12 +
 .../clientpositive/groupby_sort_skew_1_23.q.out | 140 +++
 .../results/clientpositive/input_part1.q.out    |  40 +
 .../results/clientpositive/input_part2.q.out    |  80 ++
 .../insert_values_orig_table_use_metadata.q.out | 994 +++++++++++++++++++
 ql/src/test/results/clientpositive/join17.q.out |  10 +
 ql/src/test/results/clientpositive/join26.q.out |  40 +
 ql/src/test/results/clientpositive/join32.q.out |  10 +
 .../clientpositive/join32_lessSize.q.out        |  20 +
 ql/src/test/results/clientpositive/join33.q.out |  10 +
 ql/src/test/results/clientpositive/join34.q.out |  15 +
 ql/src/test/results/clientpositive/join35.q.out |  15 +
 ql/src/test/results/clientpositive/join9.q.out  |  10 +
 .../results/clientpositive/join_map_ppr.q.out   |  40 +
 .../clientpositive/list_bucket_dml_14.q.out     |  10 +
 .../list_bucket_dml_8.q.java1.7.out             |  10 +-
 .../clientpositive/mapjoin_memcheck.q.out       |  16 +-
 .../results/clientpositive/metadataonly1.q.out  | 100 ++
 .../results/clientpositive/nullformat.q.out     |   5 +
 .../results/clientpositive/orc_create.q.out     |  10 +
 .../test/results/clientpositive/orc_llap.q.out  |  54 +-
 .../clientpositive/orc_predicate_pushdown.q.out | 128 +--
 .../parquet_array_null_element.q.out            |   5 +
 .../results/clientpositive/parquet_create.q.out |   5 +
 .../parquet_mixed_partition_formats.q.out       |   4 +
 .../results/clientpositive/parquet_serde.q.out  |   4 +
 .../clientpositive/part_inherit_tbl_props.q.out |   5 +
 .../part_inherit_tbl_props_empty.q.out          |   5 +
 .../part_inherit_tbl_props_with_star.q.out      |   5 +
 .../partition_coltype_literals.q.out            |  14 +
 ql/src/test/results/clientpositive/pcr.q.out    |  80 ++
 .../clientpositive/rand_partitionpruner2.q.out  |  40 +
 .../clientpositive/rcfile_default_format.q.out  |  15 +
 .../clientpositive/reduce_deduplicate.q.out     |  10 +
 .../test/results/clientpositive/sample1.q.out   |  40 +
 .../test/results/clientpositive/sample2.q.out   |  40 +
 .../test/results/clientpositive/sample4.q.out   |  40 +
 .../test/results/clientpositive/sample5.q.out   |  40 +
 .../test/results/clientpositive/sample6.q.out   |  40 +
 .../test/results/clientpositive/sample7.q.out   |  40 +
 ...schema_evol_orc_nonvec_fetchwork_table.q.out | 250 ++---
 .../schema_evol_orc_nonvec_mapwork_table.q.out  | 250 ++---
 .../schema_evol_orc_vec_mapwork_table.q.out     | 250 ++---
 .../schema_evol_text_nonvec_mapwork_table.q.out | 250 ++---
 .../schema_evol_text_vec_mapwork_table.q.out    | 250 ++---
 .../schema_evol_text_vecrow_mapwork_table.q.out | 250 ++---
 .../show_create_table_alter.q.out               |  13 +
 .../show_create_table_db_table.q.out            |   5 +
 .../show_create_table_serde.q.out               |  17 +
 .../clientpositive/show_tblproperties.q.out     |  10 +
 .../spark/auto_sortmerge_join_1.q.out           |  14 +
 .../spark/auto_sortmerge_join_12.q.out          |  10 +
 .../spark/auto_sortmerge_join_3.q.out           |  10 +
 .../spark/auto_sortmerge_join_4.q.out           |  10 +
 .../spark/auto_sortmerge_join_5.q.out           |  16 +
 .../spark/auto_sortmerge_join_7.q.out           |  16 +
 .../spark/auto_sortmerge_join_8.q.out           |  16 +
 .../results/clientpositive/spark/bucket2.q.out  |  10 +
 .../results/clientpositive/spark/bucket4.q.out  |  10 +
 .../results/clientpositive/spark/bucket5.q.out  |  20 +
 .../spark/bucket_map_join_1.q.out               |   8 +
 .../spark/bucket_map_join_2.q.out               |   8 +
 .../spark/bucket_map_join_spark1.q.out          |  18 +
 .../spark/bucket_map_join_spark2.q.out          |  18 +
 .../spark/bucket_map_join_spark3.q.out          |  18 +
 .../clientpositive/spark/bucketmapjoin1.q.out   |  22 +
 .../clientpositive/spark/bucketmapjoin10.q.out  |   8 +
 .../clientpositive/spark/bucketmapjoin11.q.out  |  16 +
 .../clientpositive/spark/bucketmapjoin12.q.out  |   8 +
 .../clientpositive/spark/bucketmapjoin2.q.out   |  24 +
 .../clientpositive/spark/bucketmapjoin3.q.out   |  18 +
 .../clientpositive/spark/bucketmapjoin4.q.out   |  26 +
 .../clientpositive/spark/bucketmapjoin5.q.out   |  26 +
 .../clientpositive/spark/bucketmapjoin7.q.out   |   4 +
 .../clientpositive/spark/bucketmapjoin8.q.out   |   8 +
 .../clientpositive/spark/bucketmapjoin9.q.out   |   8 +
 .../spark/bucketmapjoin_negative.q.out          |  16 +
 .../spark/bucketmapjoin_negative2.q.out         |  18 +
 .../spark/bucketmapjoin_negative3.q.out         |  72 ++
 .../spark/disable_merge_for_bucketing.q.out     |  10 +
 .../clientpositive/spark/groupby_map_ppr.q.out  |  10 +
 .../spark/groupby_map_ppr_multi_distinct.q.out  |  10 +
 .../clientpositive/spark/groupby_ppr.q.out      |  10 +
 .../spark/groupby_ppr_multi_distinct.q.out      |  10 +
 .../spark/groupby_sort_1_23.q.out               |  50 +
 .../spark/groupby_sort_skew_1_23.q.out          |  50 +
 .../clientpositive/spark/input_part2.q.out      |  20 +
 .../results/clientpositive/spark/join17.q.out   |  10 +
 .../results/clientpositive/spark/join26.q.out   |  10 +
 .../results/clientpositive/spark/join32.q.out   |  10 +
 .../clientpositive/spark/join32_lessSize.q.out  |  20 +
 .../results/clientpositive/spark/join33.q.out   |  10 +
 .../results/clientpositive/spark/join34.q.out   |  10 +
 .../results/clientpositive/spark/join35.q.out   |  10 +
 .../results/clientpositive/spark/join9.q.out    |  10 +
 .../clientpositive/spark/join_map_ppr.q.out     |  10 +
 .../clientpositive/spark/mapjoin_memcheck.q.out |  16 +-
 .../test/results/clientpositive/spark/pcr.q.out |  20 +
 .../spark/reduce_deduplicate.q.out              |  10 +
 .../results/clientpositive/spark/sample1.q.out  |  10 +
 .../results/clientpositive/spark/sample2.q.out  |  10 +
 .../results/clientpositive/spark/sample4.q.out  |  10 +
 .../results/clientpositive/spark/sample5.q.out  |  10 +
 .../results/clientpositive/spark/sample6.q.out  |  10 +
 .../results/clientpositive/spark/sample7.q.out  |  10 +
 .../results/clientpositive/spark/stats0.q.out   |  20 +
 .../results/clientpositive/spark/stats1.q.out   |   2 +
 .../results/clientpositive/spark/stats18.q.out  |   2 +
 .../results/clientpositive/spark/stats20.q.out  |  78 ++
 .../results/clientpositive/spark/stats3.q.out   |   7 +
 .../clientpositive/spark/vectorized_ptf.q.out   |  20 +
 ql/src/test/results/clientpositive/stats0.q.out |  50 +
 ql/src/test/results/clientpositive/stats1.q.out |   2 +
 .../test/results/clientpositive/stats11.q.out   |  56 ++
 .../test/results/clientpositive/stats18.q.out   |   2 +
 .../test/results/clientpositive/stats20.q.out   |  78 ++
 ql/src/test/results/clientpositive/stats3.q.out |   7 +
 .../tez/auto_sortmerge_join_1.q.out             |  18 +
 .../tez/auto_sortmerge_join_11.q.out            |  28 +
 .../tez/auto_sortmerge_join_12.q.out            |  10 +
 .../tez/auto_sortmerge_join_2.q.out             |  12 +
 .../tez/auto_sortmerge_join_3.q.out             |  18 +
 .../tez/auto_sortmerge_join_4.q.out             |  18 +
 .../tez/auto_sortmerge_join_5.q.out             |  24 +
 .../tez/auto_sortmerge_join_7.q.out             |  24 +
 .../tez/auto_sortmerge_join_8.q.out             |  24 +
 .../results/clientpositive/tez/bucket2.q.out    |  10 +
 .../results/clientpositive/tez/bucket4.q.out    |  10 +
 ...names_with_leading_and_trailing_spaces.q.out |   5 +
 .../tez/disable_merge_for_bucketing.q.out       |  10 +
 .../clientpositive/tez/explainuser_1.q.out      |  18 +-
 .../clientpositive/tez/metadataonly1.q.out      | 100 ++
 .../results/clientpositive/tez/sample1.q.out    |  10 +
 ...schema_evol_orc_nonvec_fetchwork_table.q.out | 154 +--
 .../schema_evol_orc_nonvec_mapwork_table.q.out  | 154 +--
 .../tez/schema_evol_orc_vec_mapwork_table.q.out | 154 +--
 .../schema_evol_text_nonvec_mapwork_table.q.out | 154 +--
 .../schema_evol_text_vec_mapwork_table.q.out    | 154 +--
 .../schema_evol_text_vecrow_mapwork_table.q.out | 154 +--
 .../tez/tez_join_result_complex.q.out           |  16 +
 .../clientpositive/tez/vectorized_ptf.q.out     |  20 +
 .../clientpositive/truncate_column.q.out        |  12 +
 .../truncate_column_list_bucket.q.out           |  16 +-
 .../clientpositive/unicode_notation.q.out       |  15 +
 .../unset_table_view_property.q.out             |  21 +
 .../results/clientpositive/vectorized_ptf.q.out |  20 +
 230 files changed, 6837 insertions(+), 1336 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java b/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
index 41d150c..1466b69 100644
--- a/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
+++ b/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
@@ -222,16 +222,6 @@ public class StatsSetupConst {
           // old format of statsAcc, e.g., TRUE or FALSE
           LOG.debug("In StatsSetupConst, JsonParser can not parse statsAcc.");
           stats = new JSONObject(new LinkedHashMap());
-          try {
-            if (statsAcc.equals(TRUE)) {
-              stats.put(BASIC_STATS, TRUE);
-            } else {
-              stats.put(BASIC_STATS, FALSE);
-            }
-          } catch (JSONException e1) {
-            // impossible to throw any json exceptions.
-            LOG.trace(e1.getMessage());
-          }
         }
         if (!stats.has(BASIC_STATS)) {
           // duplicate key is not possible
@@ -332,4 +322,13 @@ public class StatsSetupConst {
       params.put(COLUMN_STATS_ACCURATE, stats.toString());
     }
   }
+
+  public static void setBasicStatsStateForCreateTable(Map<String, String> params, String setting) {
+    if (TRUE.equals(setting)) {
+      for (String stat : StatsSetupConst.supportedStats) {
+        params.put(stat, "0");
+      }
+    }
+    setBasicStatsState(params, setting);
+  }
 }
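
Aside (illustrative sketch, not part of the patch): the new helper seeds a
zero value for every supported basic stat before marking the state, since a
freshly created, empty table genuinely has zero of each. A standalone Java
approximation, with the stat names taken from the golden-file diffs above:

    import java.util.LinkedHashMap;
    import java.util.Map;

    // Not Hive's code: a self-contained sketch of what
    // setBasicStatsStateForCreateTable(params, TRUE) ends up producing.
    public class CreateTableStatsSketch {
      // Mirrors StatsSetupConst.supportedStats as used in this patch.
      static final String[] SUPPORTED_STATS =
          {"numFiles", "numRows", "rawDataSize", "totalSize"};

      static void seedZeroStats(Map<String, String> params) {
        for (String stat : SUPPORTED_STATS) {
          params.put(stat, "0");
        }
        // The real helper delegates this last step to setBasicStatsState.
        params.put("COLUMN_STATS_ACCURATE", "{\"BASIC_STATS\":\"true\"}");
      }

      public static void main(String[] args) {
        Map<String, String> params = new LinkedHashMap<>();
        seedZeroStats(params);
        System.out.println(params);
      }
    }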

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
index c046708..76c1636 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
@@ -103,7 +103,7 @@ public class TestHiveHistory extends TestCase {
         db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, src, true, true);
         db.createTable(src, cols, null, TextInputFormat.class,
             IgnoreKeyTextOutputFormat.class);
-        db.loadTable(hadoopDataFile[i], src, false, false, false, false);
+        db.loadTable(hadoopDataFile[i], src, false, false, false, false, false);
         i++;
       }
 

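Aside (illustrative annotation, not compilable on its own): the extra
trailing false is the new hasFollowingStatsTask parameter; the argument
names below are inferred from the MoveTask and Hive diffs later in this
patch:

    db.loadTable(hadoopDataFile[i], src,
        false,   // replace
        false,   // isSrcLocal
        false,   // isSkewedStoreAsSubdir
        false,   // isAcid
        false);  // hasFollowingStatsTask (added by this patch)
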
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index 76220f4..da3da8b 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -265,6 +265,13 @@ public class MetaStoreUtils {
   public static boolean requireCalStats(Configuration hiveConf, Partition oldPart,
     Partition newPart, Table tbl, EnvironmentContext environmentContext) {
 
+    if (environmentContext != null
+        && environmentContext.isSetProperties()
+        && StatsSetupConst.TRUE.equals(environmentContext.getProperties().get(
+            StatsSetupConst.DO_NOT_UPDATE_STATS))) {
+      return false;
+    }
+
     if (MetaStoreUtils.isView(tbl)) {
       return false;
     }
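
Aside (caller-side sketch, using only calls that appear elsewhere in this
patch): the early return above fires when the EnvironmentContext carries
DO_NOT_UPDATE_STATS=true, i.e. when a following StatsTask will publish the
stats itself:

    import org.apache.hadoop.hive.common.StatsSetupConst;
    import org.apache.hadoop.hive.metastore.api.EnvironmentContext;

    public class DoNotUpdateStatsSketch {
      // Build the context that tells the metastore to skip recomputing
      // basic stats; Hive.loadPartition() in this patch does the same.
      static EnvironmentContext deferStatsToFollowingTask() {
        EnvironmentContext ctx = new EnvironmentContext();
        ctx.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS,
            StatsSetupConst.TRUE);
        return ctx; // passed to alterPartition(); requireCalStats() -> false
      }
    }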

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
index c2c6c65..bdda89a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
@@ -223,6 +223,14 @@ public class MoveTask extends Task<MoveWork> implements Serializable {
     }
   }
 
+  // We check whether there is exactly one immediate child task and it is a StatsTask.
+  private boolean hasFollowingStatsTask() {
+    if (this.getNumChild() == 1) {
+      return this.getChildTasks().get(0) instanceof StatsTask;
+    }
+    return false;
+  }
+
   @Override
   public int execute(DriverContext driverContext) {
 
@@ -336,10 +344,10 @@ public class MoveTask extends Task<MoveWork> implements Serializable {
         DataContainer dc = null;
         if (tbd.getPartitionSpec().size() == 0) {
           dc = new DataContainer(table.getTTable());
-          db.loadTable(tbd.getSourcePath(), tbd.getTable()
-              .getTableName(), tbd.getReplace(), work.isSrcLocal(),
-              isSkewedStoredAsDirs(tbd),
-              work.getLoadTableWork().getWriteType() != AcidUtils.Operation.NOT_ACID);
+          db.loadTable(tbd.getSourcePath(), tbd.getTable().getTableName(), tbd.getReplace(),
+              work.isSrcLocal(), isSkewedStoredAsDirs(tbd),
+              work.getLoadTableWork().getWriteType() != AcidUtils.Operation.NOT_ACID,
+              hasFollowingStatsTask());
           if (work.getOutputs() != null) {
             work.getOutputs().add(new WriteEntity(table,
                 (tbd.getReplace() ? WriteEntity.WriteType.INSERT_OVERWRITE :
@@ -421,7 +429,7 @@ public class MoveTask extends Task<MoveWork> implements Serializable {
                 dpCtx.getNumDPCols(),
                 isSkewedStoredAsDirs(tbd),
                 work.getLoadTableWork().getWriteType() != AcidUtils.Operation.NOT_ACID,
-                SessionState.get().getTxnMgr().getCurrentTxnId());
+                SessionState.get().getTxnMgr().getCurrentTxnId(), hasFollowingStatsTask());
 
             console.printInfo("\t Time taken to load dynamic partitions: "  +
                 (System.currentTimeMillis() - startTime)/1000.0 + " seconds");
@@ -480,7 +488,7 @@ public class MoveTask extends Task<MoveWork> implements Serializable {
             db.loadPartition(tbd.getSourcePath(), tbd.getTable().getTableName(),
                 tbd.getPartitionSpec(), tbd.getReplace(),
                 tbd.getInheritTableSpecs(), isSkewedStoredAsDirs(tbd), work.isSrcLocal(),
-                work.getLoadTableWork().getWriteType() != AcidUtils.Operation.NOT_ACID);
+                work.getLoadTableWork().getWriteType() != AcidUtils.Operation.NOT_ACID, hasFollowingStatsTask());
             Partition partn = db.getPartition(table, tbd.getPartitionSpec(), false);
 
             if (bucketCols != null || sortCols != null) {
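
Aside (standalone sketch, not Hive's Task hierarchy): the design choice in
hasFollowingStatsTask() is that stats publication is deferred only when the
move is followed by exactly one child task and that child is a StatsTask;
otherwise the move updates stats itself as before:

    import java.util.List;

    class TaskNodeSketch {
      List<TaskNodeSketch> childTasks;
      boolean isStatsTask;

      // Same shape as MoveTask.hasFollowingStatsTask() above.
      boolean hasFollowingStatsTask() {
        return childTasks != null && childTasks.size() == 1
            && childTasks.get(0).isStatsTask;
      }
    }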

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
index 87a7667..f3c7e99 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
@@ -88,7 +88,7 @@ public class StatsTask extends Task<StatsWork> implements Serializable {
   public int execute(DriverContext driverContext) {
 
     LOG.info("Executing stats task");
-    // Make sure that it is either an ANALYZE, INSERT OVERWRITE or CTAS command
+    // Make sure that it is an ANALYZE, an INSERT OVERWRITE (possibly via LOAD), or a CTAS command
     short workComponentsPresent = 0;
     if (work.getLoadTableDesc() != null) {
       workComponentsPresent++;
@@ -163,6 +163,16 @@ public class StatsTask extends Task<StatsWork> implements Serializable {
       if (partitions == null) {
         org.apache.hadoop.hive.metastore.api.Table tTable = table.getTTable();
         Map<String, String> parameters = tTable.getParameters();
+        // In the following scenarios, we need to reset the stats state to accurate:
+        // work.getTableSpecs() != null means an ANALYZE command;
+        // work.getLoadTableDesc().getReplace() == true means an INSERT OVERWRITE command;
+        // a non-empty work.getLoadFileDesc().getDestinationCreateTable() means CTAS etc.
+        if (work.getTableSpecs() != null
+            || (work.getLoadTableDesc() != null && work.getLoadTableDesc().getReplace())
+            || (work.getLoadFileDesc() != null && !work.getLoadFileDesc()
+                .getDestinationCreateTable().isEmpty())) {
+          StatsSetupConst.setBasicStatsState(parameters, StatsSetupConst.TRUE);
+        }
         // non-partitioned tables:
         if (!existStats(parameters) && atomic) {
           return 0;
@@ -171,20 +181,22 @@ public class StatsTask extends Task<StatsWork> implements Serializable {
         // The collectable stats for the aggregator needs to be cleared.
         // For eg. if a file is being loaded, the old number of rows are not valid
         if (work.isClearAggregatorStats()) {
-          clearStats(parameters);
-        }
-
-        if (statsAggregator != null) {
-          String prefix = getAggregationPrefix(table, null);
-          updateStats(statsAggregator, parameters, prefix, atomic);
+          // We choose to keep the now-invalid stats and only change the accuracy setting.
+          StatsSetupConst.setBasicStatsState(parameters, StatsSetupConst.FALSE);
         }
 
         updateQuickStats(wh, parameters, tTable.getSd());
-
-        // write table stats to metastore
-        if (!getWork().getNoStatsAggregator()) {
-          environmentContext = new EnvironmentContext();
-          environmentContext.putToProperties(StatsSetupConst.STATS_GENERATED, StatsSetupConst.TASK);
+        if (StatsSetupConst.areBasicStatsUptoDate(parameters)) {
+          if (statsAggregator != null) {
+            String prefix = getAggregationPrefix(table, null);
+            updateStats(statsAggregator, parameters, prefix, atomic);
+          }
+          // write table stats to metastore
+          if (!getWork().getNoStatsAggregator()) {
+            environmentContext = new EnvironmentContext();
+            environmentContext.putToProperties(StatsSetupConst.STATS_GENERATED,
+                StatsSetupConst.TASK);
+          }
         }
 
         getHive().alterTable(tableFullName, new Table(tTable), environmentContext);
@@ -203,6 +215,12 @@ public class StatsTask extends Task<StatsWork> implements Serializable {
           //
           org.apache.hadoop.hive.metastore.api.Partition tPart = partn.getTPartition();
           Map<String, String> parameters = tPart.getParameters();
+          if (work.getTableSpecs() != null
+              || (work.getLoadTableDesc() != null && work.getLoadTableDesc().getReplace())
+              || (work.getLoadFileDesc() != null && !work.getLoadFileDesc()
+                  .getDestinationCreateTable().isEmpty())) {
+            StatsSetupConst.setBasicStatsState(parameters, StatsSetupConst.TRUE);
+          }
           if (!existStats(parameters) && atomic) {
             continue;
           }
@@ -210,20 +228,21 @@ public class StatsTask extends Task<StatsWork> implements Serializable {
           // The collectable stats for the aggregator needs to be cleared.
           // For eg. if a file is being loaded, the old number of rows are not valid
           if (work.isClearAggregatorStats()) {
-            clearStats(parameters);
-          }
-
-          if (statsAggregator != null) {
-            String prefix = getAggregationPrefix(table, partn);
-            updateStats(statsAggregator, parameters, prefix, atomic);
+            // We choose to keep the now-invalid stats and only change the accuracy setting.
+            StatsSetupConst.setBasicStatsState(parameters, StatsSetupConst.FALSE);
           }
 
           updateQuickStats(wh, parameters, tPart.getSd());
-
-          if (!getWork().getNoStatsAggregator()) {
-            environmentContext = new EnvironmentContext();
-            environmentContext.putToProperties(StatsSetupConst.STATS_GENERATED,
-                StatsSetupConst.TASK);
+          if (StatsSetupConst.areBasicStatsUptoDate(parameters)) {
+            if (statsAggregator != null) {
+              String prefix = getAggregationPrefix(table, partn);
+              updateStats(statsAggregator, parameters, prefix, atomic);
+            }
+            if (!getWork().getNoStatsAggregator()) {
+              environmentContext = new EnvironmentContext();
+              environmentContext.putToProperties(StatsSetupConst.STATS_GENERATED,
+                  StatsSetupConst.TASK);
+            }
           }
           updates.add(new Partition(table, tPart));
 
@@ -346,14 +365,6 @@ public class StatsTask extends Task<StatsWork> implements Serializable {
     MetaStoreUtils.populateQuickStats(partfileStatus, parameters);
   }
 
-  private void clearStats(Map<String, String> parameters) {
-    for (String statType : StatsSetupConst.supportedStats) {
-      if (parameters.containsKey(statType)) {
-        parameters.remove(statType);
-      }
-    }
-  }
-
   private String toString(Map<String, String> parameters) {
     StringBuilder builder = new StringBuilder();
     for (String statType : StatsSetupConst.supportedStats) {
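
Aside (standalone sketch of the behavior change above): where the removed
clearStats() deleted the stat keys outright, the patch keeps the stale
numbers and only flips the accuracy flag through setBasicStatsState, so the
last known values remain visible while being marked out of date. Roughly:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class InvalidateStatsSketch {
      public static void main(String[] args) {
        Map<String, String> parameters = new LinkedHashMap<>();
        parameters.put("numRows", "500");
        parameters.put("rawDataSize", "5312");
        // Old behavior (removed): parameters.remove("numRows"); etc.
        // New behavior, approximated: keep the values, mark them inaccurate.
        parameters.put("COLUMN_STATS_ACCURATE", "{\"BASIC_STATS\":\"false\"}");
        System.out.println(parameters);
      }
    }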

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index f4a9772..2ca4d1e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -1453,10 +1453,10 @@ public class Hive {
   public void loadPartition(Path loadPath, String tableName,
       Map<String, String> partSpec, boolean replace,
       boolean inheritTableSpecs, boolean isSkewedStoreAsSubdir,
-      boolean isSrcLocal, boolean isAcid) throws HiveException {
+      boolean isSrcLocal, boolean isAcid, boolean hasFollowingStatsTask) throws HiveException {
     Table tbl = getTable(tableName);
     loadPartition(loadPath, tbl, partSpec, replace, inheritTableSpecs,
-        isSkewedStoreAsSubdir, isSrcLocal, isAcid);
+        isSkewedStoreAsSubdir, isSrcLocal, isAcid, hasFollowingStatsTask);
   }
 
   /**
@@ -1483,7 +1483,7 @@ public class Hive {
   public Partition loadPartition(Path loadPath, Table tbl,
       Map<String, String> partSpec, boolean replace,
       boolean inheritTableSpecs, boolean isSkewedStoreAsSubdir,
-      boolean isSrcLocal, boolean isAcid) throws HiveException {
+      boolean isSrcLocal, boolean isAcid, boolean hasFollowingStatsTask) throws HiveException {
     Path tblDataLocationPath =  tbl.getDataLocation();
     try {
       /**
@@ -1562,10 +1562,19 @@ public class Hive {
       }
       if (oldPart == null) {
         newTPart.getTPartition().setParameters(new HashMap<String,String>());
+        if (this.getConf().getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
+          StatsSetupConst.setBasicStatsStateForCreateTable(newTPart.getParameters(),
+              StatsSetupConst.TRUE);
+        }
         MetaStoreUtils.populateQuickStats(HiveStatsUtils.getFileStatusRecurse(newPartPath, -1, newPartPath.getFileSystem(conf)), newTPart.getParameters());
         getMSC().add_partition(newTPart.getTPartition());
       } else {
-        alterPartition(tbl.getDbName(), tbl.getTableName(), new Partition(tbl, newTPart.getTPartition()), null);
+        EnvironmentContext environmentContext = null;
+        if (hasFollowingStatsTask) {
+          environmentContext = new EnvironmentContext();
+          environmentContext.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
+        }
+        alterPartition(tbl.getDbName(), tbl.getTableName(), new Partition(tbl, newTPart.getTPartition()), environmentContext);
       }
       return newTPart;
     } catch (IOException e) {
@@ -1683,7 +1692,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
    */
   public Map<Map<String, String>, Partition> loadDynamicPartitions(Path loadPath,
       String tableName, Map<String, String> partSpec, boolean replace,
-      int numDP, boolean listBucketingEnabled, boolean isAcid, long txnId)
+      int numDP, boolean listBucketingEnabled, boolean isAcid, long txnId, boolean hasFollowingStatsTask)
       throws HiveException {
 
     Set<Path> validPartitions = new HashSet<Path>();
@@ -1733,7 +1742,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
         LinkedHashMap<String, String> fullPartSpec = new LinkedHashMap<String, String>(partSpec);
         Warehouse.makeSpecFromName(fullPartSpec, partPath);
         Partition newPartition = loadPartition(partPath, tbl, fullPartSpec, replace,
-            true, listBucketingEnabled, false, isAcid);
+            true, listBucketingEnabled, false, isAcid, hasFollowingStatsTask);
         partitionsMap.put(fullPartSpec, newPartition);
         if (inPlaceEligible) {
           InPlaceUpdates.rePositionCursor(ps);
@@ -1772,10 +1781,12 @@ private void constructOneLBLocationMap(FileStatus fSta,
    *          If the source directory is LOCAL
    * @param isSkewedStoreAsSubdir
    *          if list bucketing enabled
+   * @param hasFollowingStatsTask
+   *          whether a stats task follows this load (stats publication is deferred to it)
    * @param isAcid true if this is an ACID based write
    */
-  public void loadTable(Path loadPath, String tableName, boolean replace,
-      boolean isSrcLocal, boolean isSkewedStoreAsSubdir, boolean isAcid)
+  public void loadTable(Path loadPath, String tableName, boolean replace, boolean isSrcLocal,
+      boolean isSkewedStoreAsSubdir, boolean isAcid, boolean hasFollowingStatsTask)
       throws HiveException {
 
     List<Path> newFiles = null;
@@ -1817,8 +1828,13 @@ private void constructOneLBLocationMap(FileStatus fSta,
       throw new HiveException(e);
     }
 
+    EnvironmentContext environmentContext = null;
+    if (hasFollowingStatsTask) {
+      environmentContext = new EnvironmentContext();
+      environmentContext.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
+    }
     try {
-      alterTable(tableName, tbl, null);
+      alterTable(tableName, tbl, environmentContext);
     } catch (InvalidOperationException e) {
       throw new HiveException(e);
     }
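
The common thread in the Hive.java hunks: loadTable, loadPartition and loadDynamicPartitions now take hasFollowingStatsTask, and when a StatsTask is queued behind the load, the alterTable/alterPartition call carries DO_NOT_UPDATE_STATS so the metastore does not overwrite stats that the stats task is about to publish. A sketch of just that guard, assuming the Hive jars on the classpath (publishAlter below is a hypothetical stand-in for the actual Hive.alterTable call):

    import org.apache.hadoop.hive.common.StatsSetupConst;
    import org.apache.hadoop.hive.metastore.api.EnvironmentContext;

    public class LoadStatsGuardSketch {
      // Hypothetical stand-in for Hive.alterTable(name, tbl, environmentContext).
      static void publishAlter(String tableName, EnvironmentContext ctx) {
        System.out.println(tableName + " altered with ctx=" + ctx);
      }

      static void finishLoad(String tableName, boolean hasFollowingStatsTask) {
        EnvironmentContext ctx = null;
        if (hasFollowingStatsTask) {
          // A StatsTask runs right after this load; tell the metastore to
          // leave the stats parameters alone until it publishes fresh ones.
          ctx = new EnvironmentContext();
          ctx.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
        }
        publishAlter(tableName, ctx);
      }

      public static void main(String[] args) {
        finishLoad("default.src", true);   // deferred: stats left untouched here
        finishLoad("default.src", false);  // no stats task follows
      }
    }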

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 4a6617f..87a4b7b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -74,6 +74,7 @@ import org.apache.hadoop.hive.ql.parse.authorization.AuthorizationParseUtils;
 import org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactory;
 import org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactoryImpl;
 import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
+import org.apache.hadoop.hive.ql.plan.AddPartitionDesc.OnePartitionDesc;
 import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
 import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes;
@@ -2803,6 +2804,19 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
       addPartitionDesc.addPartition(currentPart, currentLocation);
     }
 
+    if (this.conf.getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
+      for (int index = 0; index < addPartitionDesc.getPartitionCount(); index++) {
+        OnePartitionDesc desc = addPartitionDesc.getPartition(index);
+        if (desc.getLocation() == null) {
+          if (desc.getPartParams() == null) {
+            desc.setPartParams(new HashMap<String, String>());
+          }
+          StatsSetupConst.setBasicStatsStateForCreateTable(desc.getPartParams(),
+              StatsSetupConst.TRUE);
+        }
+      }
+    }
+
     if (addPartitionDesc.getPartitionCount() == 0) {
       // nothing to do
       return;
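
In short, for ALTER TABLE ... ADD PARTITION the analyzer now pre-seeds any partition created without an explicit LOCATION: a fresh, empty partition has exactly zero files, rows and bytes, so zeroed basic stats can be marked accurate up front, while a partition pointing at an existing directory is left alone because it may already hold data. A sketch of the seeded parameter map, using the keys the .q.out files above show (hypothetical helper; the patch itself delegates to StatsSetupConst.setBasicStatsStateForCreateTable):

    import java.util.HashMap;
    import java.util.Map;

    public class SeedEmptyPartitionSketch {
      // Build the params a brand-new, empty partition starts with.
      static Map<String, String> seedEmptyPartitionParams() {
        Map<String, String> params = new HashMap<>();
        for (String stat : new String[] {"numFiles", "numRows", "rawDataSize", "totalSize"}) {
          params.put(stat, "0");  // empty partition: every basic counter is exactly 0
        }
        params.put("COLUMN_STATS_ACCURATE", "{\"BASIC_STATS\":\"true\"}");
        return params;
      }

      public static void main(String[] args) {
        System.out.println(seedEmptyPartitionParams());
      }
    }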

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
index 500c7ed..d562ddf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
@@ -287,7 +287,7 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
   private CreateTableDesc getBaseCreateTableDescFromTable(String dbName,
       org.apache.hadoop.hive.metastore.api.Table table) {
     if ((table.getPartitionKeys() == null) || (table.getPartitionKeys().size() == 0)){
-      table.putToParameters(StatsSetupConst.DO_NOT_UPDATE_STATS,"true");
+      table.putToParameters(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
     }
     CreateTableDesc tblDesc = new CreateTableDesc(
         dbName,

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
index 2dc4e11..bf808c3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
@@ -25,6 +25,7 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -810,7 +811,17 @@ public class CreateTableDesc extends DDLDesc implements Serializable {
         }
       }
     }
+    if (getLocation() == null && !this.isCTAS) {
+      if (!tbl.isPartitioned() && conf.getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
+        StatsSetupConst.setBasicStatsStateForCreateTable(tbl.getTTable().getParameters(),
+            StatsSetupConst.TRUE);
+      }
+    } else {
+      StatsSetupConst.setBasicStatsStateForCreateTable(tbl.getTTable().getParameters(),
+          StatsSetupConst.FALSE);
+    }
     return tbl;
   }
 
+
 }
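
The CreateTableDesc change applies the same reasoning at table-creation time: a non-CTAS, non-partitioned managed table created without a LOCATION starts empty, so its zeroed stats are accurate by construction; anything else (explicit location, or CTAS) gets FALSE because data may already exist or is about to arrive. The rule restated as a predicate (names and parameters here are illustrative, not the patch's API):

    public class CreateTableStatsRuleSketch {
      // True when zeroed basic stats can be marked accurate at CREATE TABLE time.
      static boolean markAccurateOnCreate(boolean hasLocation, boolean isCtas,
          boolean isPartitioned, boolean statsAutogather) {
        return !hasLocation && !isCtas && !isPartitioned && statsAutogather;
      }

      public static void main(String[] args) {
        System.out.println(markAccurateOnCreate(false, false, false, true));  // true
        System.out.println(markAccurateOnCreate(true, false, false, true));   // false: dir may hold data
        System.out.println(markAccurateOnCreate(false, true, false, true));   // false: CTAS fills it
      }
    }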

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
index 667d5c2..71dfc50 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
@@ -144,7 +144,7 @@ public class TestExecDriver extends TestCase {
         db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, src, true, true);
         db.createTable(src, cols, null, TextInputFormat.class,
             HiveIgnoreKeyTextOutputFormat.class);
-        db.loadTable(hadoopDataFile[i], src, false, true, false, false);
+        db.loadTable(hadoopDataFile[i], src, false, true, false, false, false);
         i++;
       }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/queries/clientpositive/alter_table_add_partition.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/alter_table_add_partition.q b/ql/src/test/queries/clientpositive/alter_table_add_partition.q
new file mode 100644
index 0000000..54c839b
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/alter_table_add_partition.q
@@ -0,0 +1,13 @@
+create table mp (a int) partitioned by (b int);
+
+desc formatted mp;
+
+alter table mp add partition (b=1);
+
+desc formatted mp;
+desc formatted mp partition (b=1);
+
+insert into mp partition (b=1) values (1);
+
+desc formatted mp;
+desc formatted mp partition (b=1);

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/queries/clientpositive/insert_values_orig_table_use_metadata.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/insert_values_orig_table_use_metadata.q b/ql/src/test/queries/clientpositive/insert_values_orig_table_use_metadata.q
new file mode 100644
index 0000000..73f5243
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/insert_values_orig_table_use_metadata.q
@@ -0,0 +1,121 @@
+set hive.support.concurrency=true;
+set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
+set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
+set hive.compute.query.using.stats=true;
+
+create table acid_ivot(
+    ctinyint TINYINT,
+    csmallint SMALLINT,
+    cint INT,
+    cbigint BIGINT,
+    cfloat FLOAT,
+    cdouble DOUBLE,
+    cstring1 STRING,
+    cstring2 STRING,
+    ctimestamp1 TIMESTAMP,
+    ctimestamp2 TIMESTAMP,
+    cboolean1 BOOLEAN,
+    cboolean2 BOOLEAN) clustered by (cint) into 1 buckets stored as orc TBLPROPERTIES ('transactional'='true');
+
+desc formatted acid_ivot;
+
+LOAD DATA LOCAL INPATH "../../data/files/alltypesorc" into table acid_ivot;
+
+desc formatted acid_ivot;
+
+explain select count(*) from acid_ivot;
+
+select count(*) from acid_ivot;
+
+insert into table acid_ivot values
+        (1, 2, 3, 4, 3.14, 2.34, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true),
+        (111, 222, 3333, 444, 13.14, 10239302.34239320, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true);
+
+desc formatted acid_ivot;
+
+explain select count(*) from acid_ivot;
+
+select count(*) from acid_ivot;
+
+drop table acid_ivot;
+
+create table acid_ivot(
+    ctinyint TINYINT,
+    csmallint SMALLINT,
+    cint INT,
+    cbigint BIGINT,
+    cfloat FLOAT,
+    cdouble DOUBLE,
+    cstring1 STRING,
+    cstring2 STRING,
+    ctimestamp1 TIMESTAMP,
+    ctimestamp2 TIMESTAMP,
+    cboolean1 BOOLEAN,
+    cboolean2 BOOLEAN) clustered by (cint) into 1 buckets stored as orc TBLPROPERTIES ('transactional'='true');
+
+insert into table acid_ivot values
+        (1, 2, 3, 4, 3.14, 2.34, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true),
+        (111, 222, 3333, 444, 13.14, 10239302.34239320, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true);
+
+desc formatted acid_ivot;
+
+explain select count(*) from acid_ivot;
+
+select count(*) from acid_ivot;
+
+insert into table acid_ivot values
+        (1, 2, 3, 4, 3.14, 2.34, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true),
+        (111, 222, 3333, 444, 13.14, 10239302.34239320, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true);
+
+desc formatted acid_ivot;
+
+explain select count(*) from acid_ivot;
+
+select count(*) from acid_ivot;
+
+LOAD DATA LOCAL INPATH "../../data/files/alltypesorc" into table acid_ivot;
+
+desc formatted acid_ivot;
+
+explain select count(*) from acid_ivot;
+
+drop table acid_ivot;
+
+create table acid_ivot like src;
+
+desc formatted acid_ivot;
+
+insert overwrite table acid_ivot select * from src;
+
+desc formatted acid_ivot;
+
+explain select count(*) from acid_ivot;
+
+select count(*) from acid_ivot;
+
+CREATE TABLE sp (key STRING COMMENT 'default', value STRING COMMENT 'default')
+PARTITIONED BY (ds STRING, hr STRING)
+STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH "../../data/files/kv1.txt"
+OVERWRITE INTO TABLE sp PARTITION (ds="2008-04-08", hr="11");
+
+desc formatted sp PARTITION (ds="2008-04-08", hr="11");
+
+explain select count(*) from sp where ds="2008-04-08" and hr="11";
+
+select count(*) from sp where ds="2008-04-08" and hr="11";
+
+insert into table sp PARTITION (ds="2008-04-08", hr="11") values
+        ('1', '2'), ('3', '4');
+
+desc formatted sp PARTITION (ds="2008-04-08", hr="11");
+
+analyze table sp PARTITION (ds="2008-04-08", hr="11") compute statistics;
+
+desc formatted sp PARTITION (ds="2008-04-08", hr="11");
+
+explain select count(*) from sp where ds="2008-04-08" and hr="11";
+
+select count(*) from sp where ds="2008-04-08" and hr="11";
+
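
The new test matters because of hive.compute.query.using.stats=true: when basic stats are marked accurate, a SELECT COUNT(*) is answered straight from numRows in the metastore, so a state wrongly left "accurate" after a LOAD would return stale counts -- exactly the bug this patch guards against. A sketch of that shortcut (hypothetical helper; Hive's real metadata-only answering lives in its optimizer, e.g. StatsOptimizer):

    import java.util.Map;
    import java.util.Optional;

    public class CountFromStatsSketch {
      // Answer COUNT(*) from metadata only when basic stats are marked accurate.
      static Optional<Long> countFromStats(Map<String, String> params) {
        String state = params.get("COLUMN_STATS_ACCURATE");
        if (state != null && state.contains("\"BASIC_STATS\":\"true\"")
            && params.containsKey("numRows")) {
          return Optional.of(Long.parseLong(params.get("numRows")));
        }
        return Optional.empty();  // state not accurate: fall back to scanning the data
      }

      public static void main(String[] args) {
        System.out.println(countFromStats(Map.of(
            "COLUMN_STATS_ACCURATE", "{\"BASIC_STATS\":\"true\"}", "numRows", "500")));
      }
    }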

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/queries/clientpositive/stats20.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/stats20.q b/ql/src/test/queries/clientpositive/stats20.q
index 59701bd..79fd2b8 100644
--- a/ql/src/test/queries/clientpositive/stats20.q
+++ b/ql/src/test/queries/clientpositive/stats20.q
@@ -7,10 +7,12 @@ insert overwrite table stats_partitioned partition (ds='1')
 select * from src;
 -- rawDataSize is 5312 after config is turned on
 describe formatted stats_partitioned;
+describe formatted stats_partitioned partition (ds='1');
 
 set hive.stats.collect.rawdatasize=false;
 insert overwrite table stats_partitioned partition (ds='1')
 select * from src;
 -- rawDataSize is 0 after config is turned off
 describe formatted stats_partitioned;
+describe formatted stats_partitioned partition (ds='1');
 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientnegative/alter_file_format.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/alter_file_format.q.out b/ql/src/test/results/clientnegative/alter_file_format.q.out
index 96f1bfb..e3f3b4c 100644
--- a/ql/src/test/results/clientnegative/alter_file_format.q.out
+++ b/ql/src/test/results/clientnegative/alter_file_format.q.out
@@ -24,6 +24,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientnegative/unset_table_property.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/unset_table_property.q.out b/ql/src/test/results/clientnegative/unset_table_property.q.out
index 0510788..0705b92 100644
--- a/ql/src/test/results/clientnegative/unset_table_property.q.out
+++ b/ql/src/test/results/clientnegative/unset_table_property.q.out
@@ -22,6 +22,8 @@ a	1
 c	3
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 totalSize	0
 #### A masked pattern was here ####
 FAILED: SemanticException [Error 10215]: Please use the following syntax if not sure whether the property existed or not:

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/alter_file_format.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/alter_file_format.q.out b/ql/src/test/results/clientpositive/alter_file_format.q.out
index 5d83b23..14dd892 100644
--- a/ql/src/test/results/clientpositive/alter_file_format.q.out
+++ b/ql/src/test/results/clientpositive/alter_file_format.q.out
@@ -24,6 +24,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -64,6 +69,8 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -105,6 +112,8 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -146,6 +155,8 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -187,6 +198,8 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -228,6 +241,8 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -269,6 +284,8 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -325,6 +342,11 @@ Database:           	default
 Table:              	alter_partition_format_test	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -368,8 +390,11 @@ Database:           	default
 Table:              	alter_partition_format_test	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -414,8 +439,11 @@ Database:           	default
 Table:              	alter_partition_format_test	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -460,8 +488,11 @@ Database:           	default
 Table:              	alter_partition_format_test	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -506,8 +537,11 @@ Database:           	default
 Table:              	alter_partition_format_test	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -552,8 +586,11 @@ Database:           	default
 Table:              	alter_partition_format_test	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/alter_partition_clusterby_sortby.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/alter_partition_clusterby_sortby.q.out b/ql/src/test/results/clientpositive/alter_partition_clusterby_sortby.q.out
index 184d2e4..3234792 100644
--- a/ql/src/test/results/clientpositive/alter_partition_clusterby_sortby.q.out
+++ b/ql/src/test/results/clientpositive/alter_partition_clusterby_sortby.q.out
@@ -48,8 +48,11 @@ Database:           	default
 Table:              	alter_table_partition_clusterby_sortby	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -98,8 +101,11 @@ Database:           	default
 Table:              	alter_table_partition_clusterby_sortby	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -148,8 +154,11 @@ Database:           	default
 Table:              	alter_table_partition_clusterby_sortby	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/alter_skewed_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/alter_skewed_table.q.out b/ql/src/test/results/clientpositive/alter_skewed_table.q.out
index a1caa99..0f60ba3 100644
--- a/ql/src/test/results/clientpositive/alter_skewed_table.q.out
+++ b/ql/src/test/results/clientpositive/alter_skewed_table.q.out
@@ -24,6 +24,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -64,6 +69,8 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -119,6 +126,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -159,6 +171,8 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
@@ -208,6 +222,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -250,6 +269,8 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/alter_table_add_partition.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/alter_table_add_partition.q.out b/ql/src/test/results/clientpositive/alter_table_add_partition.q.out
new file mode 100644
index 0000000..1e5e396
--- /dev/null
+++ b/ql/src/test/results/clientpositive/alter_table_add_partition.q.out
@@ -0,0 +1,202 @@
+PREHOOK: query: create table mp (a int) partitioned by (b int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@mp
+POSTHOOK: query: create table mp (a int) partitioned by (b int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@mp
+PREHOOK: query: desc formatted mp
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@mp
+POSTHOOK: query: desc formatted mp
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@mp
+# col_name            	data_type           	comment             
+	 	 
+a                   	int                 	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+b                   	int                 	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: alter table mp add partition (b=1)
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@mp
+POSTHOOK: query: alter table mp add partition (b=1)
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@mp
+POSTHOOK: Output: default@mp@b=1
+PREHOOK: query: desc formatted mp
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@mp
+POSTHOOK: query: desc formatted mp
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@mp
+# col_name            	data_type           	comment             
+	 	 
+a                   	int                 	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+b                   	int                 	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: desc formatted mp partition (b=1)
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@mp
+POSTHOOK: query: desc formatted mp partition (b=1)
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@mp
+# col_name            	data_type           	comment             
+	 	 
+a                   	int                 	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+b                   	int                 	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[1]                 	 
+Database:           	default             	 
+Table:              	mp                  	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: insert into mp partition (b=1) values (1)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@mp@b=1
+POSTHOOK: query: insert into mp partition (b=1) values (1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@mp@b=1
+POSTHOOK: Lineage: mp PARTITION(b=1).a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+PREHOOK: query: desc formatted mp
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@mp
+POSTHOOK: query: desc formatted mp
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@mp
+# col_name            	data_type           	comment             
+	 	 
+a                   	int                 	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+b                   	int                 	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: desc formatted mp partition (b=1)
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@mp
+POSTHOOK: query: desc formatted mp partition (b=1)
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@mp
+# col_name            	data_type           	comment             
+	 	 
+a                   	int                 	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+b                   	int                 	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[1]                 	 
+Database:           	default             	 
+Table:              	mp                  	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	1                   
+	numRows             	1                   
+	rawDataSize         	1                   
+	totalSize           	2                   
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/alter_table_not_sorted.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/alter_table_not_sorted.q.out b/ql/src/test/results/clientpositive/alter_table_not_sorted.q.out
index 6e1ec59..566b804 100644
--- a/ql/src/test/results/clientpositive/alter_table_not_sorted.q.out
+++ b/ql/src/test/results/clientpositive/alter_table_not_sorted.q.out
@@ -24,7 +24,12 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 	SORTBUCKETCOLSPREFIX	TRUE                
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -66,6 +71,8 @@ Table Parameters:
 	SORTBUCKETCOLSPREFIX	TRUE                
 #### A masked pattern was here ####
 	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
 	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out
index 0902556..b1d2b23 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out
@@ -166,8 +166,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -211,8 +213,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -355,8 +359,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -400,8 +406,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -521,8 +529,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -623,8 +633,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -668,8 +680,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -712,8 +726,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -794,8 +810,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -838,8 +856,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -940,8 +960,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -985,8 +1007,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1029,8 +1053,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1140,8 +1166,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1185,8 +1213,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out
index 81de2b0..82a8e93 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out
@@ -133,8 +133,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -237,8 +239,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -281,8 +285,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -325,8 +331,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -449,8 +457,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -553,8 +563,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -597,8 +609,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -641,8 +655,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -758,8 +774,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -862,8 +880,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -906,8 +926,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1023,8 +1045,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1069,8 +1093,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1111,8 +1137,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_big
                     numFiles 4
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_big { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1234,8 +1262,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1278,8 +1308,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out
index 26a11a7..d8eacbe 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out
@@ -171,8 +171,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_small
                     numFiles 2
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_small { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -219,8 +221,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_medium
                     numFiles 3
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_medium { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -267,8 +271,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                     name default.bucket_medium
                     numFiles 3
+                    numRows 0
                     partition_columns ds
                     partition_columns.types string
+                    rawDataSize 0
                     serialization.ddl struct bucket_medium { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -414,8 +420,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -459,8 +467,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_big
               numFiles 4
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_big { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -504,8 +514,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_medium
               numFiles 3
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_medium { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -549,8 +561,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.bucket_small
               numFiles 2
+              numRows 0
               partition_columns ds
               partition_columns.types string
+              rawDataSize 0
               serialization.ddl struct bucket_small { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
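
These hunks capture the intended behaviour: basic partition statistics (numRows, rawDataSize) are now serialized alongside numFiles into the partition properties that each stage plan embeds. A minimal sketch for inspecting the same parameters directly, assuming the bucket_big table and a ds='2008-04-08' partition as in the test setup:

    -- Partition Parameters should list numFiles, numRows, rawDataSize and totalSize,
    -- matching the plan properties shown above.
    DESCRIBE FORMATTED default.bucket_big PARTITION (ds='2008-04-08');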


http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/insert_values_orig_table_use_metadata.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/insert_values_orig_table_use_metadata.q.out b/ql/src/test/results/clientpositive/insert_values_orig_table_use_metadata.q.out
new file mode 100644
index 0000000..7356239
--- /dev/null
+++ b/ql/src/test/results/clientpositive/insert_values_orig_table_use_metadata.q.out
@@ -0,0 +1,994 @@
+PREHOOK: query: create table acid_ivot(
+    ctinyint TINYINT,
+    csmallint SMALLINT,
+    cint INT,
+    cbigint BIGINT,
+    cfloat FLOAT,
+    cdouble DOUBLE,
+    cstring1 STRING,
+    cstring2 STRING,
+    ctimestamp1 TIMESTAMP,
+    ctimestamp2 TIMESTAMP,
+    cboolean1 BOOLEAN,
+    cboolean2 BOOLEAN) clustered by (cint) into 1 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@acid_ivot
+POSTHOOK: query: create table acid_ivot(
+    ctinyint TINYINT,
+    csmallint SMALLINT,
+    cint INT,
+    cbigint BIGINT,
+    cfloat FLOAT,
+    cdouble DOUBLE,
+    cstring1 STRING,
+    cstring2 STRING,
+    ctimestamp1 TIMESTAMP,
+    ctimestamp2 TIMESTAMP,
+    cboolean1 BOOLEAN,
+    cboolean2 BOOLEAN) clustered by (cint) into 1 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@acid_ivot
+PREHOOK: query: desc formatted acid_ivot
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@acid_ivot
+POSTHOOK: query: desc formatted acid_ivot
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@acid_ivot
+# col_name            	data_type           	comment             
+	 	 
+ctinyint            	tinyint             	                    
+csmallint           	smallint            	                    
+cint                	int                 	                    
+cbigint             	bigint              	                    
+cfloat              	float               	                    
+cdouble             	double              	                    
+cstring1            	string              	                    
+cstring2            	string              	                    
+ctimestamp1         	timestamp           	                    
+ctimestamp2         	timestamp           	                    
+cboolean1           	boolean             	                    
+cboolean2           	boolean             	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
+	transactional       	true                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.ql.io.orc.OrcSerde	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.orc.OrcInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	1                   	 
+Bucket Columns:     	[cint]              	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/alltypesorc" into table acid_ivot
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@acid_ivot
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/alltypesorc" into table acid_ivot
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@acid_ivot
+PREHOOK: query: desc formatted acid_ivot
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@acid_ivot
+POSTHOOK: query: desc formatted acid_ivot
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@acid_ivot
+# col_name            	data_type           	comment             
+	 	 
+ctinyint            	tinyint             	                    
+csmallint           	smallint            	                    
+cint                	int                 	                    
+cbigint             	bigint              	                    
+cfloat              	float               	                    
+cdouble             	double              	                    
+cstring1            	string              	                    
+cstring2            	string              	                    
+ctimestamp1         	timestamp           	                    
+ctimestamp2         	timestamp           	                    
+cboolean1           	boolean             	                    
+cboolean2           	boolean             	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	numFiles            	1                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	377237              
+	transactional       	true                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.ql.io.orc.OrcSerde	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.orc.OrcInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	1                   	 
+Bucket Columns:     	[cint]              	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: explain select count(*) from acid_ivot
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select count(*) from acid_ivot
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: acid_ivot
+            Statistics: Num rows: 1 Data size: 377237 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              Statistics: Num rows: 1 Data size: 377237 Basic stats: COMPLETE Column stats: COMPLETE
+              Group By Operator
+                aggregations: count()
+                mode: hash
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col0 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select count(*) from acid_ivot
+PREHOOK: type: QUERY
+PREHOOK: Input: default@acid_ivot
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from acid_ivot
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@acid_ivot
+#### A masked pattern was here ####
+12288
+PREHOOK: query: insert into table acid_ivot values
+        (1, 2, 3, 4, 3.14, 2.34, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true),
+        (111, 222, 3333, 444, 13.14, 10239302.34239320, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@acid_ivot
+POSTHOOK: query: insert into table acid_ivot values
+        (1, 2, 3, 4, 3.14, 2.34, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true),
+        (111, 222, 3333, 444, 13.14, 10239302.34239320, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@acid_ivot
+POSTHOOK: Lineage: acid_ivot.cbigint EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col4, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cboolean1 EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col11, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cboolean2 EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col12, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cdouble EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col6, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cfloat EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col5, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cint EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.csmallint EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cstring1 SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col7, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cstring2 SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col8, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.ctimestamp1 EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col9, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.ctimestamp2 EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col10, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.ctinyint EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+PREHOOK: query: desc formatted acid_ivot
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@acid_ivot
+POSTHOOK: query: desc formatted acid_ivot
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@acid_ivot
+# col_name            	data_type           	comment             
+	 	 
+ctinyint            	tinyint             	                    
+csmallint           	smallint            	                    
+cint                	int                 	                    
+cbigint             	bigint              	                    
+cfloat              	float               	                    
+cdouble             	double              	                    
+cstring1            	string              	                    
+cstring2            	string              	                    
+ctimestamp1         	timestamp           	                    
+ctimestamp2         	timestamp           	                    
+cboolean1           	boolean             	                    
+cboolean2           	boolean             	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	numFiles            	2                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	378741              
+	transactional       	true                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.ql.io.orc.OrcSerde	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.orc.OrcInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	1                   	 
+Bucket Columns:     	[cint]              	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: explain select count(*) from acid_ivot
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select count(*) from acid_ivot
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: acid_ivot
+            Statistics: Num rows: 1 Data size: 378741 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              Statistics: Num rows: 1 Data size: 378741 Basic stats: COMPLETE Column stats: COMPLETE
+              Group By Operator
+                aggregations: count()
+                mode: hash
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col0 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select count(*) from acid_ivot
+PREHOOK: type: QUERY
+PREHOOK: Input: default@acid_ivot
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from acid_ivot
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@acid_ivot
+#### A masked pattern was here ####
+12290
+PREHOOK: query: drop table acid_ivot
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@acid_ivot
+PREHOOK: Output: default@acid_ivot
+POSTHOOK: query: drop table acid_ivot
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@acid_ivot
+POSTHOOK: Output: default@acid_ivot
+PREHOOK: query: create table acid_ivot(
+    ctinyint TINYINT,
+    csmallint SMALLINT,
+    cint INT,
+    cbigint BIGINT,
+    cfloat FLOAT,
+    cdouble DOUBLE,
+    cstring1 STRING,
+    cstring2 STRING,
+    ctimestamp1 TIMESTAMP,
+    ctimestamp2 TIMESTAMP,
+    cboolean1 BOOLEAN,
+    cboolean2 BOOLEAN) clustered by (cint) into 1 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@acid_ivot
+POSTHOOK: query: create table acid_ivot(
+    ctinyint TINYINT,
+    csmallint SMALLINT,
+    cint INT,
+    cbigint BIGINT,
+    cfloat FLOAT,
+    cdouble DOUBLE,
+    cstring1 STRING,
+    cstring2 STRING,
+    ctimestamp1 TIMESTAMP,
+    ctimestamp2 TIMESTAMP,
+    cboolean1 BOOLEAN,
+    cboolean2 BOOLEAN) clustered by (cint) into 1 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@acid_ivot
+PREHOOK: query: insert into table acid_ivot values
+        (1, 2, 3, 4, 3.14, 2.34, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true),
+        (111, 222, 3333, 444, 13.14, 10239302.34239320, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__2
+PREHOOK: Output: default@acid_ivot
+POSTHOOK: query: insert into table acid_ivot values
+        (1, 2, 3, 4, 3.14, 2.34, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true),
+        (111, 222, 3333, 444, 13.14, 10239302.34239320, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__2
+POSTHOOK: Output: default@acid_ivot
+POSTHOOK: Lineage: acid_ivot.cbigint EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col4, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cboolean1 EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col11, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cboolean2 EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col12, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cdouble EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col6, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cfloat EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col5, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cint EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.csmallint EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cstring1 SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col7, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cstring2 SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col8, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.ctimestamp1 EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col9, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.ctimestamp2 EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col10, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.ctinyint EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+PREHOOK: query: desc formatted acid_ivot
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@acid_ivot
+POSTHOOK: query: desc formatted acid_ivot
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@acid_ivot
+# col_name            	data_type           	comment             
+	 	 
+ctinyint            	tinyint             	                    
+csmallint           	smallint            	                    
+cint                	int                 	                    
+cbigint             	bigint              	                    
+cfloat              	float               	                    
+cdouble             	double              	                    
+cstring1            	string              	                    
+cstring2            	string              	                    
+ctimestamp1         	timestamp           	                    
+ctimestamp2         	timestamp           	                    
+cboolean1           	boolean             	                    
+cboolean2           	boolean             	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	1                   
+	numRows             	2                   
+	rawDataSize         	0                   
+	totalSize           	1508                
+	transactional       	true                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.ql.io.orc.OrcSerde	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.orc.OrcInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	1                   	 
+Bucket Columns:     	[cint]              	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: explain select count(*) from acid_ivot
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select count(*) from acid_ivot
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select count(*) from acid_ivot
+PREHOOK: type: QUERY
+PREHOOK: Input: default@acid_ivot
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from acid_ivot
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@acid_ivot
+#### A masked pattern was here ####
+2
+PREHOOK: query: insert into table acid_ivot values
+        (1, 2, 3, 4, 3.14, 2.34, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true),
+        (111, 222, 3333, 444, 13.14, 10239302.34239320, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__3
+PREHOOK: Output: default@acid_ivot
+POSTHOOK: query: insert into table acid_ivot values
+        (1, 2, 3, 4, 3.14, 2.34, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true),
+        (111, 222, 3333, 444, 13.14, 10239302.34239320, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__3
+POSTHOOK: Output: default@acid_ivot
+POSTHOOK: Lineage: acid_ivot.cbigint EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col4, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cboolean1 EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col11, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cboolean2 EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col12, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cdouble EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col6, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cfloat EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col5, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cint EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.csmallint EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cstring1 SIMPLE [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col7, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.cstring2 SIMPLE [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col8, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.ctimestamp1 EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col9, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.ctimestamp2 EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col10, type:string, comment:), ]
+POSTHOOK: Lineage: acid_ivot.ctinyint EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+PREHOOK: query: desc formatted acid_ivot
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@acid_ivot
+POSTHOOK: query: desc formatted acid_ivot
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@acid_ivot
+# col_name            	data_type           	comment             
+	 	 
+ctinyint            	tinyint             	                    
+csmallint           	smallint            	                    
+cint                	int                 	                    
+cbigint             	bigint              	                    
+cfloat              	float               	                    
+cdouble             	double              	                    
+cstring1            	string              	                    
+cstring2            	string              	                    
+ctimestamp1         	timestamp           	                    
+ctimestamp2         	timestamp           	                    
+cboolean1           	boolean             	                    
+cboolean2           	boolean             	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	2                   
+	numRows             	4                   
+	rawDataSize         	0                   
+	totalSize           	3016                
+	transactional       	true                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.ql.io.orc.OrcSerde	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.orc.OrcInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	1                   	 
+Bucket Columns:     	[cint]              	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: explain select count(*) from acid_ivot
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select count(*) from acid_ivot
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select count(*) from acid_ivot
+PREHOOK: type: QUERY
+PREHOOK: Input: default@acid_ivot
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from acid_ivot
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@acid_ivot
+#### A masked pattern was here ####
+4
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/alltypesorc" into table acid_ivot
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@acid_ivot
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/alltypesorc" into table acid_ivot
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@acid_ivot
+PREHOOK: query: desc formatted acid_ivot
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@acid_ivot
+POSTHOOK: query: desc formatted acid_ivot
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@acid_ivot
+# col_name            	data_type           	comment             
+	 	 
+ctinyint            	tinyint             	                    
+csmallint           	smallint            	                    
+cint                	int                 	                    
+cbigint             	bigint              	                    
+cfloat              	float               	                    
+cdouble             	double              	                    
+cstring1            	string              	                    
+cstring2            	string              	                    
+ctimestamp1         	timestamp           	                    
+ctimestamp2         	timestamp           	                    
+cboolean1           	boolean             	                    
+cboolean2           	boolean             	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	numFiles            	3                   
+	numRows             	4                   
+	rawDataSize         	0                   
+	totalSize           	380253              
+	transactional       	true                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.ql.io.orc.OrcSerde	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.orc.OrcInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	1                   	 
+Bucket Columns:     	[cint]              	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: explain select count(*) from acid_ivot
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select count(*) from acid_ivot
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: acid_ivot
+            Statistics: Num rows: 4 Data size: 380253 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              Statistics: Num rows: 4 Data size: 380253 Basic stats: COMPLETE Column stats: COMPLETE
+              Group By Operator
+                aggregations: count()
+                mode: hash
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col0 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: drop table acid_ivot
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@acid_ivot
+PREHOOK: Output: default@acid_ivot
+POSTHOOK: query: drop table acid_ivot
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@acid_ivot
+POSTHOOK: Output: default@acid_ivot
+PREHOOK: query: create table acid_ivot like src
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@acid_ivot
+POSTHOOK: query: create table acid_ivot like src
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@acid_ivot
+PREHOOK: query: desc formatted acid_ivot
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@acid_ivot
+POSTHOOK: query: desc formatted acid_ivot
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@acid_ivot
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	default             
+value               	string              	default             
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: insert overwrite table acid_ivot select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@acid_ivot
+POSTHOOK: query: insert overwrite table acid_ivot select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@acid_ivot
+POSTHOOK: Lineage: acid_ivot.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: acid_ivot.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: desc formatted acid_ivot
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@acid_ivot
+POSTHOOK: query: desc formatted acid_ivot
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@acid_ivot
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	default             
+value               	string              	default             
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	1                   
+	numRows             	500                 
+	rawDataSize         	5312                
+	totalSize           	5812                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: explain select count(*) from acid_ivot
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select count(*) from acid_ivot
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select count(*) from acid_ivot
+PREHOOK: type: QUERY
+PREHOOK: Input: default@acid_ivot
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from acid_ivot
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@acid_ivot
+#### A masked pattern was here ####
+500
+PREHOOK: query: CREATE TABLE sp (key STRING COMMENT 'default', value STRING COMMENT 'default')
+PARTITIONED BY (ds STRING, hr STRING)
+STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@sp
+POSTHOOK: query: CREATE TABLE sp (key STRING COMMENT 'default', value STRING COMMENT 'default')
+PARTITIONED BY (ds STRING, hr STRING)
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@sp
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/kv1.txt"
+OVERWRITE INTO TABLE sp PARTITION (ds="2008-04-08", hr="11")
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@sp
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/kv1.txt"
+OVERWRITE INTO TABLE sp PARTITION (ds="2008-04-08", hr="11")
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@sp
+POSTHOOK: Output: default@sp@ds=2008-04-08/hr=11
+PREHOOK: query: desc formatted sp PARTITION (ds="2008-04-08", hr="11")
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@sp
+POSTHOOK: query: desc formatted sp PARTITION (ds="2008-04-08", hr="11")
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@sp
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	default             
+value               	string              	default             
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 11]    	 
+Database:           	default             	 
+Table:              	sp                  	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	numFiles            	1                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	5812                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: explain select count(*) from sp where ds="2008-04-08" and hr="11"
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select count(*) from sp where ds="2008-04-08" and hr="11"
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: sp
+            Statistics: Num rows: 1 Data size: 5812 Basic stats: PARTIAL Column stats: NONE
+            Select Operator
+              Statistics: Num rows: 1 Data size: 5812 Basic stats: PARTIAL Column stats: NONE
+              Group By Operator
+                aggregations: count()
+                mode: hash
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select count(*) from sp where ds="2008-04-08" and hr="11"
+PREHOOK: type: QUERY
+PREHOOK: Input: default@sp
+PREHOOK: Input: default@sp@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from sp where ds="2008-04-08" and hr="11"
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@sp
+POSTHOOK: Input: default@sp@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+500
+PREHOOK: query: insert into table sp PARTITION (ds="2008-04-08", hr="11") values
+        ('1', '2'), ('3', '4')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__4
+PREHOOK: Output: default@sp@ds=2008-04-08/hr=11
+POSTHOOK: query: insert into table sp PARTITION (ds="2008-04-08", hr="11") values
+        ('1', '2'), ('3', '4')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__4
+POSTHOOK: Output: default@sp@ds=2008-04-08/hr=11
+POSTHOOK: Lineage: sp PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: sp PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: desc formatted sp PARTITION (ds="2008-04-08", hr="11")
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@sp
+POSTHOOK: query: desc formatted sp PARTITION (ds="2008-04-08", hr="11")
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@sp
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	default             
+value               	string              	default             
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 11]    	 
+Database:           	default             	 
+Table:              	sp                  	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	numFiles            	2                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	5820                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: analyze table sp PARTITION (ds="2008-04-08", hr="11") compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@sp
+PREHOOK: Input: default@sp@ds=2008-04-08/hr=11
+PREHOOK: Output: default@sp
+PREHOOK: Output: default@sp@ds=2008-04-08/hr=11
+POSTHOOK: query: analyze table sp PARTITION (ds="2008-04-08", hr="11") compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@sp
+POSTHOOK: Input: default@sp@ds=2008-04-08/hr=11
+POSTHOOK: Output: default@sp
+POSTHOOK: Output: default@sp@ds=2008-04-08/hr=11
+PREHOOK: query: desc formatted sp PARTITION (ds="2008-04-08", hr="11")
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@sp
+POSTHOOK: query: desc formatted sp PARTITION (ds="2008-04-08", hr="11")
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@sp
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	default             
+value               	string              	default             
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 11]    	 
+Database:           	default             	 
+Table:              	sp                  	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	2                   
+	numRows             	502                 
+	rawDataSize         	5318                
+	totalSize           	5820                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: explain select count(*) from sp where ds="2008-04-08" and hr="11"
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select count(*) from sp where ds="2008-04-08" and hr="11"
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select count(*) from sp where ds="2008-04-08" and hr="11"
+PREHOOK: type: QUERY
+PREHOOK: Input: default@sp
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from sp where ds="2008-04-08" and hr="11"
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@sp
+#### A masked pattern was here ####
+502
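
Taken together, the new golden file demonstrates the stats state machine this patch introduces: CREATE TABLE seeds accurate basic stats at zero ({"BASIC_STATS":"true"}); LOAD DATA moves files in without counting rows, so COLUMN_STATS_ACCURATE is dropped and count(*) falls back to a full MapReduce scan; a plain INSERT writes counted rows, so a freshly created table stays accurate and count(*) is answered from metadata by a bare Fetch Operator; and after a load, only ANALYZE ... COMPUTE STATISTICS re-establishes accuracy. A condensed sketch of that sequence, with an illustrative table name and local path:

    CREATE TABLE t (key STRING, value STRING);          -- stats accurate, all counters zero
    LOAD DATA LOCAL INPATH '/tmp/kv1.txt' INTO TABLE t; -- accuracy dropped: rows were never counted
    INSERT INTO TABLE t VALUES ('1', '2');              -- counted write, but cannot restore accuracy over loaded data
    ANALYZE TABLE t COMPUTE STATISTICS;                 -- full recount; COLUMN_STATS_ACCURATE is set again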

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/join17.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join17.q.out b/ql/src/test/results/clientpositive/join17.q.out
index 39c49a5..2c03584 100644
--- a/ql/src/test/results/clientpositive/join17.q.out
+++ b/ql/src/test/results/clientpositive/join17.q.out
@@ -143,15 +143,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key1,value1,key2,value2
                     columns.comments 
                     columns.types int:string:int:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { i32 key1, string value1, i32 key2, string value2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -168,15 +173,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key1,value1,key2,value2
                 columns.comments 
                 columns.types int:string:int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key1, string value1, i32 key2, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1
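
The join17 hunks above, like the join26 and join32 hunks that follow, are mechanical fallout of the same change: a newly created destination table now carries accurate zeroed basic stats, so every plan that embeds its table properties gains COLUMN_STATS_ACCURATE plus numFiles/numRows/rawDataSize/totalSize at 0. A quick way to reproduce those parameters, using the dest1 schema from the plans above:

    CREATE TABLE dest1 (key1 INT, value1 STRING, key2 INT, value2 STRING);
    SHOW TBLPROPERTIES dest1;  -- expect COLUMN_STATS_ACCURATE and the zeroed basic stats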

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/join26.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join26.q.out b/ql/src/test/results/clientpositive/join26.q.out
index 0fdc403..86e51fb 100644
--- a/ql/src/test/results/clientpositive/join26.q.out
+++ b/ql/src/test/results/clientpositive/join26.q.out
@@ -112,15 +112,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,value,val2
                           columns.comments 
                           columns.types string:string:string
 #### A masked pattern was here ####
                           name default.dest_j1
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct dest_j1 { string key, string value, string val2}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.dest_j1
@@ -199,15 +204,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.dest_j1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j1 { string key, string value, string val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j1
@@ -230,15 +240,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value,val2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.dest_j1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest_j1 { string key, string value, string val2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest_j1
@@ -254,30 +269,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value,val2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.dest_j1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest_j1 { string key, string value, string val2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.dest_j1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j1 { string key, string value, string val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j1
@@ -299,15 +324,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value,val2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.dest_j1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest_j1 { string key, string value, string val2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest_j1
@@ -323,30 +353,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value,val2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.dest_j1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest_j1 { string key, string value, string val2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.dest_j1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j1 { string key, string value, string val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j1

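The hunks above all repeat one pattern: with HIVE-13341, basic stats are written at CREATE TABLE time, so the descriptor of an empty target table such as default.dest_j1 already carries COLUMN_STATS_ACCURATE plus zeroed numFiles/numRows/rawDataSize/totalSize, and those parameters now surface wherever the plan prints the table's properties. A minimal sketch of how to observe this (HiveQL; assumes stats autogathering is enabled, i.e. hive.stats.autogather left at its default of true):

    -- Create an empty managed table matching the descriptor in the hunks above.
    CREATE TABLE dest_j1 (key STRING, value STRING, val2 STRING);

    -- Basic stats are recorded immediately, before any data is loaded:
    --   COLUMN_STATS_ACCURATE  {"BASIC_STATS":"true"}
    --   numFiles=0, numRows=0, rawDataSize=0, totalSize=0
    DESCRIBE FORMATTED dest_j1;
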
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/join32.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join32.q.out b/ql/src/test/results/clientpositive/join32.q.out
index aad5463..8653c2f 100644
--- a/ql/src/test/results/clientpositive/join32.q.out
+++ b/ql/src/test/results/clientpositive/join32.q.out
@@ -128,15 +128,20 @@ STAGE PLANS:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                             properties:
+                              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                               bucket_count -1
                               columns key,value,val2
                               columns.comments 
                               columns.types string:string:string
 #### A masked pattern was here ####
                               name default.dest_j1
+                              numFiles 0
+                              numRows 0
+                              rawDataSize 0
                               serialization.ddl struct dest_j1 { string key, string value, string val2}
                               serialization.format 1
                               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              totalSize 0
 #### A masked pattern was here ####
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             name: default.dest_j1
@@ -294,15 +299,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.dest_j1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j1 { string key, string value, string val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/join32_lessSize.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join32_lessSize.q.out b/ql/src/test/results/clientpositive/join32_lessSize.q.out
index a94b951..fd7bba7 100644
--- a/ql/src/test/results/clientpositive/join32_lessSize.q.out
+++ b/ql/src/test/results/clientpositive/join32_lessSize.q.out
@@ -263,15 +263,20 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         bucket_count -1
                         columns key,value,val2
                         columns.comments 
                         columns.types string:string:string
 #### A masked pattern was here ####
                         name default.dest_j1
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct dest_j1 { string key, string value, string val2}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.dest_j1
@@ -359,15 +364,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.dest_j1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j1 { string key, string value, string val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j1
@@ -1346,15 +1356,20 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                         bucket_count -1
                         columns key,value,val2
                         columns.comments 
                         columns.types string:string:string
 #### A masked pattern was here ####
                         name default.dest_j2
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
                         serialization.ddl struct dest_j2 { string key, string value, string val2}
                         serialization.format 1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
 #### A masked pattern was here ####
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.dest_j2
@@ -1444,15 +1459,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.dest_j2
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j2 { string key, string value, string val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j2

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/join33.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join33.q.out b/ql/src/test/results/clientpositive/join33.q.out
index aad5463..8653c2f 100644
--- a/ql/src/test/results/clientpositive/join33.q.out
+++ b/ql/src/test/results/clientpositive/join33.q.out
@@ -128,15 +128,20 @@ STAGE PLANS:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                             properties:
+                              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                               bucket_count -1
                               columns key,value,val2
                               columns.comments 
                               columns.types string:string:string
 #### A masked pattern was here ####
                               name default.dest_j1
+                              numFiles 0
+                              numRows 0
+                              rawDataSize 0
                               serialization.ddl struct dest_j1 { string key, string value, string val2}
                               serialization.format 1
                               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              totalSize 0
 #### A masked pattern was here ####
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             name: default.dest_j1
@@ -294,15 +299,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.dest_j1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j1 { string key, string value, string val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/join34.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join34.q.out b/ql/src/test/results/clientpositive/join34.q.out
index 74fac5c..bb23644 100644
--- a/ql/src/test/results/clientpositive/join34.q.out
+++ b/ql/src/test/results/clientpositive/join34.q.out
@@ -108,15 +108,20 @@ STAGE PLANS:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                             properties:
+                              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                               bucket_count -1
                               columns key,value,val2
                               columns.comments 
                               columns.types string:string:string
 #### A masked pattern was here ####
                               name default.dest_j1
+                              numFiles 0
+                              numRows 0
+                              rawDataSize 0
                               serialization.ddl struct dest_j1 { string key, string value, string val2}
                               serialization.format 1
                               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              totalSize 0
 #### A masked pattern was here ####
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             name: default.dest_j1
@@ -161,15 +166,20 @@ STAGE PLANS:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                             properties:
+                              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                               bucket_count -1
                               columns key,value,val2
                               columns.comments 
                               columns.types string:string:string
 #### A masked pattern was here ####
                               name default.dest_j1
+                              numFiles 0
+                              numRows 0
+                              rawDataSize 0
                               serialization.ddl struct dest_j1 { string key, string value, string val2}
                               serialization.format 1
                               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              totalSize 0
 #### A masked pattern was here ####
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             name: default.dest_j1
@@ -281,15 +291,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.dest_j1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j1 { string key, string value, string val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/join35.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join35.q.out b/ql/src/test/results/clientpositive/join35.q.out
index 6fe9cae..b1732ec 100644
--- a/ql/src/test/results/clientpositive/join35.q.out
+++ b/ql/src/test/results/clientpositive/join35.q.out
@@ -205,15 +205,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,value,val2
                           columns.comments 
                           columns.types string:string:int
 #### A masked pattern was here ####
                           name default.dest_j1
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct dest_j1 { string key, string value, i32 val2}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.dest_j1
@@ -248,15 +253,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,value,val2
                           columns.comments 
                           columns.types string:string:int
 #### A masked pattern was here ####
                           name default.dest_j1
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct dest_j1 { string key, string value, i32 val2}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.dest_j1
@@ -364,15 +374,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:int
 #### A masked pattern was here ####
                 name default.dest_j1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j1 { string key, string value, i32 val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/join9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join9.q.out b/ql/src/test/results/clientpositive/join9.q.out
index f41d153..180d46c 100644
--- a/ql/src/test/results/clientpositive/join9.q.out
+++ b/ql/src/test/results/clientpositive/join9.q.out
@@ -189,15 +189,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value
                     columns.comments 
                     columns.types int:string
 #### A masked pattern was here ####
                     name default.dest1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest1 { i32 key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest1
@@ -214,15 +219,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
                 name default.dest1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1

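The same parameters appear in every serialized output-table descriptor, which is why each EXPLAIN EXTENDED golden file that writes into dest1 or dest_j1 changes. A hedged sketch (the query below is illustrative, not the exact join9 test query; src is the standard two-column test table, and the CAST matches dest1's int key):

    EXPLAIN EXTENDED
    INSERT OVERWRITE TABLE dest1
    SELECT CAST(src.key AS INT), src.value FROM src;

    -- In the extended plan, the properties block for default.dest1 now lists
    -- COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} alongside the zeroed
    -- numFiles / numRows / rawDataSize / totalSize parameters.
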
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/join_map_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join_map_ppr.q.out b/ql/src/test/results/clientpositive/join_map_ppr.q.out
index e720e65..928d4fb 100644
--- a/ql/src/test/results/clientpositive/join_map_ppr.q.out
+++ b/ql/src/test/results/clientpositive/join_map_ppr.q.out
@@ -114,15 +114,20 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
+                          COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                           bucket_count -1
                           columns key,value,val2
                           columns.comments 
                           columns.types string:string:string
 #### A masked pattern was here ####
                           name default.dest_j1
+                          numFiles 0
+                          numRows 0
+                          rawDataSize 0
                           serialization.ddl struct dest_j1 { string key, string value, string val2}
                           serialization.format 1
                           serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          totalSize 0
 #### A masked pattern was here ####
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.dest_j1
@@ -201,15 +206,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.dest_j1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j1 { string key, string value, string val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j1
@@ -232,15 +242,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value,val2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.dest_j1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest_j1 { string key, string value, string val2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest_j1
@@ -256,30 +271,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value,val2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.dest_j1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest_j1 { string key, string value, string val2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.dest_j1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j1 { string key, string value, string val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j1
@@ -301,15 +326,20 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
                     columns key,value,val2
                     columns.comments 
                     columns.types string:string:string
 #### A masked pattern was here ####
                     name default.dest_j1
+                    numFiles 0
+                    numRows 0
+                    rawDataSize 0
                     serialization.ddl struct dest_j1 { string key, string value, string val2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 0
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.dest_j1
@@ -325,30 +355,40 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
               columns key,value,val2
               columns.comments 
               columns.types string:string:string
 #### A masked pattern was here ####
               name default.dest_j1
+              numFiles 0
+              numRows 0
+              rawDataSize 0
               serialization.ddl struct dest_j1 { string key, string value, string val2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 0
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value,val2
                 columns.comments 
                 columns.types string:string:string
 #### A masked pattern was here ####
                 name default.dest_j1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct dest_j1 { string key, string value, string val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest_j1

http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/list_bucket_dml_14.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_14.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_14.q.out
index c063dc1..5016855 100644
--- a/ql/src/test/results/clientpositive/list_bucket_dml_14.q.out
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_14.q.out
@@ -56,15 +56,20 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                       bucket_count -1
                       columns key,value
                       columns.comments 
                       columns.types string:string
 #### A masked pattern was here ####
                       name default.list_bucketing
+                      numFiles 0
+                      numRows 0
+                      rawDataSize 0
                       serialization.ddl struct list_bucketing { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 0
 #### A masked pattern was here ####
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.list_bucketing
@@ -130,15 +135,20 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing
+                numFiles 0
+                numRows 0
+                rawDataSize 0
                 serialization.ddl struct list_bucketing { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.list_bucketing


[03/21] hive git commit: HIVE-13341: Stats state is not captured correctly: differentiate load table and create table (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

Posted by px...@apache.org.
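Part 03/21 updates the Tez schema-evolution golden files. Besides the new table parameters, the operator-level estimates change: once numRows is recorded in the metastore, the TableScan/Select row counts come from the stored basic stats rather than being derived from file size, which is why estimates such as (rows=10 width=99) become (rows=4 width=100) below. A sketch (table name taken from the diff; assumes four rows were inserted after the table was created):

    EXPLAIN
    SELECT insert_num, a, b
    FROM table_add_int_permute_select
    ORDER BY insert_num;

    -- TableScan [TS_0] now reports (rows=4 ...), i.e. the metastore numRows,
    -- instead of the earlier size-based heuristic of 10 rows.
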
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/schema_evol_orc_nonvec_fetchwork_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/schema_evol_orc_nonvec_fetchwork_table.q.out b/ql/src/test/results/clientpositive/tez/schema_evol_orc_nonvec_fetchwork_table.q.out
index 8a9481f..4587156 100644
--- a/ql/src/test/results/clientpositive/tez/schema_evol_orc_nonvec_fetchwork_table.q.out
+++ b/ql/src/test/results/clientpositive/tez/schema_evol_orc_nonvec_fetchwork_table.q.out
@@ -48,6 +48,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -184,13 +189,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=99)
+        Select Operator [SEL_3] (rows=4 width=100)
           Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=99)
+            Select Operator [SEL_1] (rows=4 width=100)
               Output:["_col0","_col1","_col2"]
-              TableScan [TS_0] (rows=10 width=99)
+              TableScan [TS_0] (rows=4 width=100)
                 default@table_add_int_permute_select,table_add_int_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right
@@ -294,6 +299,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -433,13 +443,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=153)
+        Select Operator [SEL_3] (rows=4 width=100)
           Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=153)
+            Select Operator [SEL_1] (rows=4 width=100)
               Output:["_col0","_col1","_col2"]
-              TableScan [TS_0] (rows=10 width=153)
+              TableScan [TS_0] (rows=4 width=100)
                 default@table_add_int_string_permute_select,table_add_int_string_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right
@@ -697,13 +707,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=234)
+        Select Operator [SEL_3] (rows=4 width=414)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=234)
+            Select Operator [SEL_1] (rows=4 width=414)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=234)
+              TableScan [TS_0] (rows=4 width=414)
                 default@table_change_string_group_double,table_change_string_group_double,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double order by insert_num
@@ -849,13 +859,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=9 width=530)
+        Select Operator [SEL_3] (rows=3 width=296)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=9 width=530)
+            Select Operator [SEL_1] (rows=3 width=296)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
-              TableScan [TS_0] (rows=9 width=530)
+              TableScan [TS_0] (rows=3 width=296)
                 default@table_change_date_group_string_group_timestamp,table_change_date_group_string_group_timestamp,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,b from table_change_date_group_string_group_timestamp order by insert_num
@@ -1017,13 +1027,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=9 width=529)
+        Select Operator [SEL_3] (rows=3 width=376)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=9 width=529)
+            Select Operator [SEL_1] (rows=3 width=376)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
-              TableScan [TS_0] (rows=9 width=529)
+              TableScan [TS_0] (rows=3 width=376)
                 default@table_change_date_group_string_group_date,table_change_date_group_string_group_date,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,b from table_change_date_group_string_group_date order by insert_num
@@ -1165,13 +1175,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=313)
+        Select Operator [SEL_3] (rows=4 width=115)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=313)
+            Select Operator [SEL_1] (rows=4 width=115)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=313)
+              TableScan [TS_0] (rows=4 width=115)
                 default@table_change_numeric_group_string_group_multi_ints_string,table_change_numeric_group_string_group_multi_ints_string,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_string order by insert_num
@@ -1306,13 +1316,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=422)
+        Select Operator [SEL_3] (rows=4 width=115)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=422)
+            Select Operator [SEL_1] (rows=4 width=115)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=422)
+              TableScan [TS_0] (rows=4 width=115)
                 default@table_change_numeric_group_string_group_multi_ints_char,table_change_numeric_group_string_group_multi_ints_char,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_char order by insert_num
@@ -1447,13 +1457,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=314)
+        Select Operator [SEL_3] (rows=4 width=115)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=314)
+            Select Operator [SEL_1] (rows=4 width=115)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=314)
+              TableScan [TS_0] (rows=4 width=115)
                 default@table_change_numeric_group_string_group_multi_ints_char_trunc,table_change_numeric_group_string_group_multi_ints_char_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_char_trunc order by insert_num
@@ -1588,13 +1598,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=313)
+        Select Operator [SEL_3] (rows=4 width=115)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=313)
+            Select Operator [SEL_1] (rows=4 width=115)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=313)
+              TableScan [TS_0] (rows=4 width=115)
                 default@table_change_numeric_group_string_group_multi_ints_varchar,table_change_numeric_group_string_group_multi_ints_varchar,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_varchar order by insert_num
@@ -1729,13 +1739,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=310)
+        Select Operator [SEL_3] (rows=4 width=115)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=310)
+            Select Operator [SEL_1] (rows=4 width=115)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=310)
+              TableScan [TS_0] (rows=4 width=115)
                 default@table_change_numeric_group_string_group_multi_ints_varchar_trunc,table_change_numeric_group_string_group_multi_ints_varchar_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_varchar_trunc order by insert_num
@@ -1868,13 +1878,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=304)
+        Select Operator [SEL_3] (rows=4 width=220)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=304)
+            Select Operator [SEL_1] (rows=4 width=220)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=304)
+              TableScan [TS_0] (rows=4 width=220)
                 default@table_change_numeric_group_string_group_floating_string,table_change_numeric_group_string_group_floating_string,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_string order by insert_num
@@ -2007,13 +2017,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=383)
+        Select Operator [SEL_3] (rows=4 width=220)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=383)
+            Select Operator [SEL_1] (rows=4 width=220)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=383)
+              TableScan [TS_0] (rows=4 width=220)
                 default@table_change_numeric_group_string_group_floating_char,table_change_numeric_group_string_group_floating_char,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_char order by insert_num
@@ -2146,13 +2156,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=306)
+        Select Operator [SEL_3] (rows=4 width=220)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=306)
+            Select Operator [SEL_1] (rows=4 width=220)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=306)
+              TableScan [TS_0] (rows=4 width=220)
                 default@table_change_numeric_group_string_group_floating_char_trunc,table_change_numeric_group_string_group_floating_char_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_char_trunc order by insert_num
@@ -2285,13 +2295,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=304)
+        Select Operator [SEL_3] (rows=4 width=220)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=304)
+            Select Operator [SEL_1] (rows=4 width=220)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=304)
+              TableScan [TS_0] (rows=4 width=220)
                 default@table_change_numeric_group_string_group_floating_varchar,table_change_numeric_group_string_group_floating_varchar,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_varchar order by insert_num
@@ -2424,13 +2434,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=302)
+        Select Operator [SEL_3] (rows=4 width=220)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=302)
+            Select Operator [SEL_1] (rows=4 width=220)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=302)
+              TableScan [TS_0] (rows=4 width=220)
                 default@table_change_numeric_group_string_group_floating_varchar_trunc,table_change_numeric_group_string_group_floating_varchar_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_varchar_trunc order by insert_num
@@ -2573,13 +2583,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=496)
+        Select Operator [SEL_3] (rows=4 width=476)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=496)
+            Select Operator [SEL_1] (rows=4 width=476)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=496)
+              TableScan [TS_0] (rows=4 width=476)
                 default@table_change_string_group_string_group_string,table_change_string_group_string_group_string,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string order by insert_num
@@ -2712,13 +2722,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=427)
+        Select Operator [SEL_3] (rows=4 width=498)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=427)
+            Select Operator [SEL_1] (rows=4 width=498)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=427)
+              TableScan [TS_0] (rows=4 width=498)
                 default@table_change_string_group_string_group_char,table_change_string_group_string_group_char,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_string_group_char order by insert_num
@@ -2851,13 +2861,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=402)
+        Select Operator [SEL_3] (rows=4 width=381)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=402)
+            Select Operator [SEL_1] (rows=4 width=381)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=402)
+              TableScan [TS_0] (rows=4 width=381)
                 default@table_change_string_group_string_group_varchar,table_change_string_group_string_group_varchar,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_string_group_varchar order by insert_num
@@ -3004,13 +3014,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=183)
+        Select Operator [SEL_3] (rows=4 width=114)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=183)
+            Select Operator [SEL_1] (rows=4 width=114)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7"]
-              TableScan [TS_0] (rows=10 width=183)
+              TableScan [TS_0] (rows=4 width=114)
                 default@table_change_lower_to_higher_numeric_group_tinyint,table_change_lower_to_higher_numeric_group_tinyint,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,b from table_change_lower_to_higher_numeric_group_tinyint order by insert_num
@@ -3147,13 +3157,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=182)
+        Select Operator [SEL_3] (rows=4 width=116)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=182)
+            Select Operator [SEL_1] (rows=4 width=116)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
-              TableScan [TS_0] (rows=10 width=182)
+              TableScan [TS_0] (rows=4 width=116)
                 default@table_change_lower_to_higher_numeric_group_smallint,table_change_lower_to_higher_numeric_group_smallint,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,b from table_change_lower_to_higher_numeric_group_smallint order by insert_num
@@ -3288,13 +3298,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=178)
+        Select Operator [SEL_3] (rows=4 width=112)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=178)
+            Select Operator [SEL_1] (rows=4 width=112)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=178)
+              TableScan [TS_0] (rows=4 width=112)
                 default@table_change_lower_to_higher_numeric_group_int,table_change_lower_to_higher_numeric_group_int,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_lower_to_higher_numeric_group_int order by insert_num
@@ -3427,13 +3437,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=177)
+        Select Operator [SEL_3] (rows=4 width=120)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=177)
+            Select Operator [SEL_1] (rows=4 width=120)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=177)
+              TableScan [TS_0] (rows=4 width=120)
                 default@table_change_lower_to_higher_numeric_group_bigint,table_change_lower_to_higher_numeric_group_bigint,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_bigint order by insert_num
@@ -3564,13 +3574,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=189)
+        Select Operator [SEL_3] (rows=4 width=320)
           Output:["_col0","_col1","_col2","_col3"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=189)
+            Select Operator [SEL_1] (rows=4 width=320)
               Output:["_col0","_col1","_col2","_col3"]
-              TableScan [TS_0] (rows=10 width=189)
+              TableScan [TS_0] (rows=4 width=320)
                 default@table_change_lower_to_higher_numeric_group_decimal,table_change_lower_to_higher_numeric_group_decimal,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","b"]
 
 PREHOOK: query: select insert_num,c1,c2,b from table_change_lower_to_higher_numeric_group_decimal order by insert_num
@@ -3699,13 +3709,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=99)
+        Select Operator [SEL_3] (rows=4 width=100)
           Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=99)
+            Select Operator [SEL_1] (rows=4 width=100)
               Output:["_col0","_col1","_col2"]
-              TableScan [TS_0] (rows=10 width=99)
+              TableScan [TS_0] (rows=4 width=100)
                 default@table_change_lower_to_higher_numeric_group_float,table_change_lower_to_higher_numeric_group_float,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","b"]
 
 PREHOOK: query: select insert_num,c1,b from table_change_lower_to_higher_numeric_group_float order by insert_num

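The hunks above and the two files below all follow the pattern this patch establishes: basic table stats (numFiles, numRows, rawDataSize, totalSize) are recorded as accurate at CREATE TABLE time, so DESCRIBE FORMATTED gains a COLUMN_STATS_ACCURATE entry and EXPLAIN picks up real row counts instead of fallback estimates. As a minimal HiveQL sketch of the DESCRIBE FORMATTED side (t_stats_demo is a hypothetical name, not one of this patch's test tables):

-- Freshly created, empty managed table; with this change its basic stats
-- are captured as accurate immediately instead of being left unknown.
CREATE TABLE t_stats_demo (insert_num INT, a INT, b STRING) STORED AS ORC;
DESCRIBE FORMATTED t_stats_demo;
-- Expected among Table Parameters, matching the hunks in this diff:
--   COLUMN_STATS_ACCURATE   {"BASIC_STATS":"true"}
--   numFiles 0, numRows 0, rawDataSize 0, totalSize 0
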
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/schema_evol_orc_nonvec_mapwork_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/schema_evol_orc_nonvec_mapwork_table.q.out b/ql/src/test/results/clientpositive/tez/schema_evol_orc_nonvec_mapwork_table.q.out
index 39954b8..4889cdb 100644
--- a/ql/src/test/results/clientpositive/tez/schema_evol_orc_nonvec_mapwork_table.q.out
+++ b/ql/src/test/results/clientpositive/tez/schema_evol_orc_nonvec_mapwork_table.q.out
@@ -48,6 +48,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -184,13 +189,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=99)
+        Select Operator [SEL_3] (rows=4 width=100)
           Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=99)
+            Select Operator [SEL_1] (rows=4 width=100)
               Output:["_col0","_col1","_col2"]
-              TableScan [TS_0] (rows=10 width=99)
+              TableScan [TS_0] (rows=4 width=100)
                 default@table_add_int_permute_select,table_add_int_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right
@@ -294,6 +299,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -433,13 +443,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=153)
+        Select Operator [SEL_3] (rows=4 width=100)
           Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=153)
+            Select Operator [SEL_1] (rows=4 width=100)
               Output:["_col0","_col1","_col2"]
-              TableScan [TS_0] (rows=10 width=153)
+              TableScan [TS_0] (rows=4 width=100)
                 default@table_add_int_string_permute_select,table_add_int_string_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right
@@ -697,13 +707,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=234)
+        Select Operator [SEL_3] (rows=4 width=414)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=234)
+            Select Operator [SEL_1] (rows=4 width=414)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=234)
+              TableScan [TS_0] (rows=4 width=414)
                 default@table_change_string_group_double,table_change_string_group_double,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double order by insert_num
@@ -849,13 +859,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=9 width=530)
+        Select Operator [SEL_3] (rows=3 width=296)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=9 width=530)
+            Select Operator [SEL_1] (rows=3 width=296)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
-              TableScan [TS_0] (rows=9 width=530)
+              TableScan [TS_0] (rows=3 width=296)
                 default@table_change_date_group_string_group_timestamp,table_change_date_group_string_group_timestamp,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,b from table_change_date_group_string_group_timestamp order by insert_num
@@ -1017,13 +1027,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=9 width=529)
+        Select Operator [SEL_3] (rows=3 width=376)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=9 width=529)
+            Select Operator [SEL_1] (rows=3 width=376)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
-              TableScan [TS_0] (rows=9 width=529)
+              TableScan [TS_0] (rows=3 width=376)
                 default@table_change_date_group_string_group_date,table_change_date_group_string_group_date,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,b from table_change_date_group_string_group_date order by insert_num
@@ -1165,13 +1175,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=313)
+        Select Operator [SEL_3] (rows=4 width=115)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=313)
+            Select Operator [SEL_1] (rows=4 width=115)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=313)
+              TableScan [TS_0] (rows=4 width=115)
                 default@table_change_numeric_group_string_group_multi_ints_string,table_change_numeric_group_string_group_multi_ints_string,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_string order by insert_num
@@ -1306,13 +1316,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=422)
+        Select Operator [SEL_3] (rows=4 width=115)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=422)
+            Select Operator [SEL_1] (rows=4 width=115)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=422)
+              TableScan [TS_0] (rows=4 width=115)
                 default@table_change_numeric_group_string_group_multi_ints_char,table_change_numeric_group_string_group_multi_ints_char,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_char order by insert_num
@@ -1447,13 +1457,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=314)
+        Select Operator [SEL_3] (rows=4 width=115)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=314)
+            Select Operator [SEL_1] (rows=4 width=115)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=314)
+              TableScan [TS_0] (rows=4 width=115)
                 default@table_change_numeric_group_string_group_multi_ints_char_trunc,table_change_numeric_group_string_group_multi_ints_char_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_char_trunc order by insert_num
@@ -1588,13 +1598,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=313)
+        Select Operator [SEL_3] (rows=4 width=115)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=313)
+            Select Operator [SEL_1] (rows=4 width=115)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=313)
+              TableScan [TS_0] (rows=4 width=115)
                 default@table_change_numeric_group_string_group_multi_ints_varchar,table_change_numeric_group_string_group_multi_ints_varchar,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_varchar order by insert_num
@@ -1729,13 +1739,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=310)
+        Select Operator [SEL_3] (rows=4 width=115)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=310)
+            Select Operator [SEL_1] (rows=4 width=115)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=310)
+              TableScan [TS_0] (rows=4 width=115)
                 default@table_change_numeric_group_string_group_multi_ints_varchar_trunc,table_change_numeric_group_string_group_multi_ints_varchar_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_varchar_trunc order by insert_num
@@ -1868,13 +1878,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=304)
+        Select Operator [SEL_3] (rows=4 width=220)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=304)
+            Select Operator [SEL_1] (rows=4 width=220)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=304)
+              TableScan [TS_0] (rows=4 width=220)
                 default@table_change_numeric_group_string_group_floating_string,table_change_numeric_group_string_group_floating_string,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_string order by insert_num
@@ -2007,13 +2017,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=383)
+        Select Operator [SEL_3] (rows=4 width=220)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=383)
+            Select Operator [SEL_1] (rows=4 width=220)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=383)
+              TableScan [TS_0] (rows=4 width=220)
                 default@table_change_numeric_group_string_group_floating_char,table_change_numeric_group_string_group_floating_char,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_char order by insert_num
@@ -2146,13 +2156,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=306)
+        Select Operator [SEL_3] (rows=4 width=220)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=306)
+            Select Operator [SEL_1] (rows=4 width=220)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=306)
+              TableScan [TS_0] (rows=4 width=220)
                 default@table_change_numeric_group_string_group_floating_char_trunc,table_change_numeric_group_string_group_floating_char_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_char_trunc order by insert_num
@@ -2285,13 +2295,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=304)
+        Select Operator [SEL_3] (rows=4 width=220)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=304)
+            Select Operator [SEL_1] (rows=4 width=220)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=304)
+              TableScan [TS_0] (rows=4 width=220)
                 default@table_change_numeric_group_string_group_floating_varchar,table_change_numeric_group_string_group_floating_varchar,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_varchar order by insert_num
@@ -2424,13 +2434,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=302)
+        Select Operator [SEL_3] (rows=4 width=220)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=302)
+            Select Operator [SEL_1] (rows=4 width=220)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=302)
+              TableScan [TS_0] (rows=4 width=220)
                 default@table_change_numeric_group_string_group_floating_varchar_trunc,table_change_numeric_group_string_group_floating_varchar_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_varchar_trunc order by insert_num
@@ -2573,13 +2583,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=496)
+        Select Operator [SEL_3] (rows=4 width=476)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=496)
+            Select Operator [SEL_1] (rows=4 width=476)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=496)
+              TableScan [TS_0] (rows=4 width=476)
                 default@table_change_string_group_string_group_string,table_change_string_group_string_group_string,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string order by insert_num
@@ -2712,13 +2722,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=427)
+        Select Operator [SEL_3] (rows=4 width=498)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=427)
+            Select Operator [SEL_1] (rows=4 width=498)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=427)
+              TableScan [TS_0] (rows=4 width=498)
                 default@table_change_string_group_string_group_char,table_change_string_group_string_group_char,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_string_group_char order by insert_num
@@ -2851,13 +2861,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=402)
+        Select Operator [SEL_3] (rows=4 width=381)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=402)
+            Select Operator [SEL_1] (rows=4 width=381)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=402)
+              TableScan [TS_0] (rows=4 width=381)
                 default@table_change_string_group_string_group_varchar,table_change_string_group_string_group_varchar,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_string_group_varchar order by insert_num
@@ -3004,13 +3014,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=183)
+        Select Operator [SEL_3] (rows=4 width=114)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=183)
+            Select Operator [SEL_1] (rows=4 width=114)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7"]
-              TableScan [TS_0] (rows=10 width=183)
+              TableScan [TS_0] (rows=4 width=114)
                 default@table_change_lower_to_higher_numeric_group_tinyint,table_change_lower_to_higher_numeric_group_tinyint,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,b from table_change_lower_to_higher_numeric_group_tinyint order by insert_num
@@ -3147,13 +3157,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=182)
+        Select Operator [SEL_3] (rows=4 width=116)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=182)
+            Select Operator [SEL_1] (rows=4 width=116)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
-              TableScan [TS_0] (rows=10 width=182)
+              TableScan [TS_0] (rows=4 width=116)
                 default@table_change_lower_to_higher_numeric_group_smallint,table_change_lower_to_higher_numeric_group_smallint,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,b from table_change_lower_to_higher_numeric_group_smallint order by insert_num
@@ -3288,13 +3298,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=178)
+        Select Operator [SEL_3] (rows=4 width=112)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=178)
+            Select Operator [SEL_1] (rows=4 width=112)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=178)
+              TableScan [TS_0] (rows=4 width=112)
                 default@table_change_lower_to_higher_numeric_group_int,table_change_lower_to_higher_numeric_group_int,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_lower_to_higher_numeric_group_int order by insert_num
@@ -3427,13 +3437,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=177)
+        Select Operator [SEL_3] (rows=4 width=120)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=177)
+            Select Operator [SEL_1] (rows=4 width=120)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=177)
+              TableScan [TS_0] (rows=4 width=120)
                 default@table_change_lower_to_higher_numeric_group_bigint,table_change_lower_to_higher_numeric_group_bigint,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_bigint order by insert_num
@@ -3564,13 +3574,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=189)
+        Select Operator [SEL_3] (rows=4 width=320)
           Output:["_col0","_col1","_col2","_col3"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=189)
+            Select Operator [SEL_1] (rows=4 width=320)
               Output:["_col0","_col1","_col2","_col3"]
-              TableScan [TS_0] (rows=10 width=189)
+              TableScan [TS_0] (rows=4 width=320)
                 default@table_change_lower_to_higher_numeric_group_decimal,table_change_lower_to_higher_numeric_group_decimal,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","b"]
 
 PREHOOK: query: select insert_num,c1,c2,b from table_change_lower_to_higher_numeric_group_decimal order by insert_num
@@ -3699,13 +3709,13 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=10 width=99)
+        Select Operator [SEL_3] (rows=4 width=100)
           Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE]
           SHUFFLE [RS_2]
-            Select Operator [SEL_1] (rows=10 width=99)
+            Select Operator [SEL_1] (rows=4 width=100)
               Output:["_col0","_col1","_col2"]
-              TableScan [TS_0] (rows=10 width=99)
+              TableScan [TS_0] (rows=4 width=100)
                 default@table_change_lower_to_higher_numeric_group_float,table_change_lower_to_higher_numeric_group_float,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","b"]
 
 PREHOOK: query: select insert_num,c1,b from table_change_lower_to_higher_numeric_group_float order by insert_num

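The vectorized file below changes in lockstep with the non-vectorized plans above: only the operator ids differ (SEL_5/SEL_7/RS_6 in place of SEL_1/SEL_3/RS_2), while the rows/width figures move the same way because both plans read the same table stats. A rough sketch of where those figures surface, assuming a hypothetical two-row ORC table t_demo that is not part of this patch:

SET hive.execution.engine=tez;
CREATE TABLE t_demo (insert_num INT, b STRING) STORED AS ORC;
INSERT INTO t_demo VALUES (1, 'original'), (2, 'original');
EXPLAIN SELECT insert_num, b FROM t_demo ORDER BY insert_num;
-- With row stats kept accurate on load, the scan is annotated with the
-- real count, e.g. "TableScan [TS_0] (rows=2 ...)", rather than a coarser
-- estimate used when row-level stats are unknown (the rows=10 figures on
-- the "-" lines in these hunks).
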
http://git-wip-us.apache.org/repos/asf/hive/blob/244ce09c/ql/src/test/results/clientpositive/tez/schema_evol_orc_vec_mapwork_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/schema_evol_orc_vec_mapwork_table.q.out b/ql/src/test/results/clientpositive/tez/schema_evol_orc_vec_mapwork_table.q.out
index ca292b1..3833af7 100644
--- a/ql/src/test/results/clientpositive/tez/schema_evol_orc_vec_mapwork_table.q.out
+++ b/ql/src/test/results/clientpositive/tez/schema_evol_orc_vec_mapwork_table.q.out
@@ -48,6 +48,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -184,13 +189,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=99)
+        Select Operator [SEL_7] (rows=4 width=100)
           Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=99)
+            Select Operator [SEL_5] (rows=4 width=100)
               Output:["_col0","_col1","_col2"]
-              TableScan [TS_0] (rows=10 width=99)
+              TableScan [TS_0] (rows=4 width=100)
                 default@table_add_int_permute_select,table_add_int_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right
@@ -294,6 +299,11 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -433,13 +443,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=153)
+        Select Operator [SEL_7] (rows=4 width=100)
           Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=153)
+            Select Operator [SEL_5] (rows=4 width=100)
               Output:["_col0","_col1","_col2"]
-              TableScan [TS_0] (rows=10 width=153)
+              TableScan [TS_0] (rows=4 width=100)
                 default@table_add_int_string_permute_select,table_add_int_string_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right
@@ -697,13 +707,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=234)
+        Select Operator [SEL_7] (rows=4 width=414)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=234)
+            Select Operator [SEL_5] (rows=4 width=414)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=234)
+              TableScan [TS_0] (rows=4 width=414)
                 default@table_change_string_group_double,table_change_string_group_double,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double order by insert_num
@@ -849,13 +859,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=9 width=530)
+        Select Operator [SEL_7] (rows=3 width=296)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=9 width=530)
+            Select Operator [SEL_5] (rows=3 width=296)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
-              TableScan [TS_0] (rows=9 width=530)
+              TableScan [TS_0] (rows=3 width=296)
                 default@table_change_date_group_string_group_timestamp,table_change_date_group_string_group_timestamp,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,b from table_change_date_group_string_group_timestamp order by insert_num
@@ -1017,13 +1027,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=9 width=529)
+        Select Operator [SEL_7] (rows=3 width=376)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=9 width=529)
+            Select Operator [SEL_5] (rows=3 width=376)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
-              TableScan [TS_0] (rows=9 width=529)
+              TableScan [TS_0] (rows=3 width=376)
                 default@table_change_date_group_string_group_date,table_change_date_group_string_group_date,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,b from table_change_date_group_string_group_date order by insert_num
@@ -1165,13 +1175,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=313)
+        Select Operator [SEL_7] (rows=4 width=115)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=313)
+            Select Operator [SEL_5] (rows=4 width=115)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=313)
+              TableScan [TS_0] (rows=4 width=115)
                 default@table_change_numeric_group_string_group_multi_ints_string,table_change_numeric_group_string_group_multi_ints_string,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_string order by insert_num
@@ -1306,13 +1316,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=422)
+        Select Operator [SEL_7] (rows=4 width=115)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=422)
+            Select Operator [SEL_5] (rows=4 width=115)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=422)
+              TableScan [TS_0] (rows=4 width=115)
                 default@table_change_numeric_group_string_group_multi_ints_char,table_change_numeric_group_string_group_multi_ints_char,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_char order by insert_num
@@ -1447,13 +1457,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=314)
+        Select Operator [SEL_7] (rows=4 width=115)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=314)
+            Select Operator [SEL_5] (rows=4 width=115)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=314)
+              TableScan [TS_0] (rows=4 width=115)
                 default@table_change_numeric_group_string_group_multi_ints_char_trunc,table_change_numeric_group_string_group_multi_ints_char_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_char_trunc order by insert_num
@@ -1588,13 +1598,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=313)
+        Select Operator [SEL_7] (rows=4 width=115)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=313)
+            Select Operator [SEL_5] (rows=4 width=115)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=313)
+              TableScan [TS_0] (rows=4 width=115)
                 default@table_change_numeric_group_string_group_multi_ints_varchar,table_change_numeric_group_string_group_multi_ints_varchar,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_varchar order by insert_num
@@ -1729,13 +1739,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=310)
+        Select Operator [SEL_7] (rows=4 width=115)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=310)
+            Select Operator [SEL_5] (rows=4 width=115)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=310)
+              TableScan [TS_0] (rows=4 width=115)
                 default@table_change_numeric_group_string_group_multi_ints_varchar_trunc,table_change_numeric_group_string_group_multi_ints_varchar_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_numeric_group_string_group_multi_ints_varchar_trunc order by insert_num
@@ -1868,13 +1878,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=304)
+        Select Operator [SEL_7] (rows=4 width=220)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=304)
+            Select Operator [SEL_5] (rows=4 width=220)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=304)
+              TableScan [TS_0] (rows=4 width=220)
                 default@table_change_numeric_group_string_group_floating_string,table_change_numeric_group_string_group_floating_string,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_string order by insert_num
@@ -2007,13 +2017,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=383)
+        Select Operator [SEL_7] (rows=4 width=220)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=383)
+            Select Operator [SEL_5] (rows=4 width=220)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=383)
+              TableScan [TS_0] (rows=4 width=220)
                 default@table_change_numeric_group_string_group_floating_char,table_change_numeric_group_string_group_floating_char,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_char order by insert_num
@@ -2146,13 +2156,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=306)
+        Select Operator [SEL_7] (rows=4 width=220)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=306)
+            Select Operator [SEL_5] (rows=4 width=220)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=306)
+              TableScan [TS_0] (rows=4 width=220)
                 default@table_change_numeric_group_string_group_floating_char_trunc,table_change_numeric_group_string_group_floating_char_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_char_trunc order by insert_num
@@ -2285,13 +2295,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=304)
+        Select Operator [SEL_7] (rows=4 width=220)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=304)
+            Select Operator [SEL_5] (rows=4 width=220)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=304)
+              TableScan [TS_0] (rows=4 width=220)
                 default@table_change_numeric_group_string_group_floating_varchar,table_change_numeric_group_string_group_floating_varchar,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_varchar order by insert_num
@@ -2424,13 +2434,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=302)
+        Select Operator [SEL_7] (rows=4 width=220)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=302)
+            Select Operator [SEL_5] (rows=4 width=220)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=302)
+              TableScan [TS_0] (rows=4 width=220)
                 default@table_change_numeric_group_string_group_floating_varchar_trunc,table_change_numeric_group_string_group_floating_varchar_trunc,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_numeric_group_string_group_floating_varchar_trunc order by insert_num
@@ -2573,13 +2583,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=496)
+        Select Operator [SEL_7] (rows=4 width=476)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=496)
+            Select Operator [SEL_5] (rows=4 width=476)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=496)
+              TableScan [TS_0] (rows=4 width=476)
                 default@table_change_string_group_string_group_string,table_change_string_group_string_group_string,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_string_group_string_group_string order by insert_num
@@ -2712,13 +2722,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=427)
+        Select Operator [SEL_7] (rows=4 width=498)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=427)
+            Select Operator [SEL_5] (rows=4 width=498)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=427)
+              TableScan [TS_0] (rows=4 width=498)
                 default@table_change_string_group_string_group_char,table_change_string_group_string_group_char,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_string_group_char order by insert_num
@@ -2851,13 +2861,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=402)
+        Select Operator [SEL_7] (rows=4 width=381)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=402)
+            Select Operator [SEL_5] (rows=4 width=381)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=402)
+              TableScan [TS_0] (rows=4 width=381)
                 default@table_change_string_group_string_group_varchar,table_change_string_group_string_group_varchar,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_string_group_varchar order by insert_num
@@ -3004,13 +3014,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=183)
+        Select Operator [SEL_7] (rows=4 width=114)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=183)
+            Select Operator [SEL_5] (rows=4 width=114)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7"]
-              TableScan [TS_0] (rows=10 width=183)
+              TableScan [TS_0] (rows=4 width=114)
                 default@table_change_lower_to_higher_numeric_group_tinyint,table_change_lower_to_higher_numeric_group_tinyint,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,b from table_change_lower_to_higher_numeric_group_tinyint order by insert_num
@@ -3147,13 +3157,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=182)
+        Select Operator [SEL_7] (rows=4 width=116)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=182)
+            Select Operator [SEL_5] (rows=4 width=116)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
-              TableScan [TS_0] (rows=10 width=182)
+              TableScan [TS_0] (rows=4 width=116)
                 default@table_change_lower_to_higher_numeric_group_smallint,table_change_lower_to_higher_numeric_group_smallint,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,b from table_change_lower_to_higher_numeric_group_smallint order by insert_num
@@ -3288,13 +3298,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=178)
+        Select Operator [SEL_7] (rows=4 width=112)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=178)
+            Select Operator [SEL_5] (rows=4 width=112)
               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-              TableScan [TS_0] (rows=10 width=178)
+              TableScan [TS_0] (rows=4 width=112)
                 default@table_change_lower_to_higher_numeric_group_int,table_change_lower_to_higher_numeric_group_int,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,b from table_change_lower_to_higher_numeric_group_int order by insert_num
@@ -3427,13 +3437,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=177)
+        Select Operator [SEL_7] (rows=4 width=120)
           Output:["_col0","_col1","_col2","_col3","_col4"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=177)
+            Select Operator [SEL_5] (rows=4 width=120)
               Output:["_col0","_col1","_col2","_col3","_col4"]
-              TableScan [TS_0] (rows=10 width=177)
+              TableScan [TS_0] (rows=4 width=120)
                 default@table_change_lower_to_higher_numeric_group_bigint,table_change_lower_to_higher_numeric_group_bigint,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_lower_to_higher_numeric_group_bigint order by insert_num
@@ -3564,13 +3574,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=189)
+        Select Operator [SEL_7] (rows=4 width=320)
           Output:["_col0","_col1","_col2","_col3"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=189)
+            Select Operator [SEL_5] (rows=4 width=320)
               Output:["_col0","_col1","_col2","_col3"]
-              TableScan [TS_0] (rows=10 width=189)
+              TableScan [TS_0] (rows=4 width=320)
                 default@table_change_lower_to_higher_numeric_group_decimal,table_change_lower_to_higher_numeric_group_decimal,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","b"]
 
 PREHOOK: query: select insert_num,c1,c2,b from table_change_lower_to_higher_numeric_group_decimal order by insert_num
@@ -3699,13 +3709,13 @@ Stage-0
     Stage-1
       Reducer 2 vectorized
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=10 width=99)
+        Select Operator [SEL_7] (rows=4 width=100)
           Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE] vectorized
           SHUFFLE [RS_6]
-            Select Operator [SEL_5] (rows=10 width=99)
+            Select Operator [SEL_5] (rows=4 width=100)
               Output:["_col0","_col1","_col2"]
-              TableScan [TS_0] (rows=10 width=99)
+              TableScan [TS_0] (rows=4 width=100)
                 default@table_change_lower_to_higher_numeric_group_float,table_change_lower_to_higher_numeric_group_float,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","b"]
 
 PREHOOK: query: select insert_num,c1,b from table_change_lower_to_higher_numeric_group_float order by insert_num