Posted to commits@hive.apache.org by ct...@apache.org on 2017/01/31 14:47:16 UTC

[1/2] hive git commit: HIVE-15653: Some ALTER TABLE commands drop table stats (Chaoyu Tang, reviewed by PengCheng Xiong)

Repository: hive
Updated Branches:
  refs/heads/master 77dfbe0b5 -> 5468207e4


http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_table.q.out
index afeb41d..da99110 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_table.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_table.q.out
@@ -70,9 +70,9 @@ Stage-0
     Stage-1
       Map 1 llap
       File Output Operator [FS_2]
-        Select Operator [SEL_1] (rows=5 width=20)
+        Select Operator [SEL_1] (rows=6 width=20)
           Output:["_col0","_col1","_col2"]
-          TableScan [TS_0] (rows=5 width=20)
+          TableScan [TS_0] (rows=6 width=20)
             default@table_add_int_permute_select,table_add_int_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: select insert_num,a,b from table_add_int_permute_select
@@ -183,9 +183,9 @@ Stage-0
     Stage-1
       Map 1 llap
       File Output Operator [FS_2]
-        Select Operator [SEL_1] (rows=5 width=20)
+        Select Operator [SEL_1] (rows=6 width=21)
           Output:["_col0","_col1","_col2"]
-          TableScan [TS_0] (rows=5 width=20)
+          TableScan [TS_0] (rows=6 width=21)
             default@table_add_int_string_permute_select,table_add_int_string_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: select insert_num,a,b from table_add_int_string_permute_select
@@ -358,9 +358,9 @@ Stage-0
     Stage-1
       Map 1 llap
       File Output Operator [FS_2]
-        Select Operator [SEL_1] (rows=5 width=90)
+        Select Operator [SEL_1] (rows=6 width=80)
           Output:["_col0","_col1","_col2","_col3","_col4"]
-          TableScan [TS_0] (rows=5 width=90)
+          TableScan [TS_0] (rows=6 width=80)
             default@table_change_string_group_double,table_change_string_group_double,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double
@@ -602,9 +602,9 @@ Stage-0
     Stage-1
       Map 1 llap
       File Output Operator [FS_2]
-        Select Operator [SEL_1] (rows=5 width=151)
+        Select Operator [SEL_1] (rows=6 width=178)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21"]
-          TableScan [TS_0] (rows=5 width=151)
+          TableScan [TS_0] (rows=6 width=178)
             default@table_change_numeric_group_string_group_multi_ints_string_group,table_change_numeric_group_string_group_multi_ints_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","c16","c17","c18","c19","c20","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group
@@ -757,9 +757,9 @@ Stage-0
     Stage-1
       Map 1 llap
       File Output Operator [FS_2]
-        Select Operator [SEL_1] (rows=5 width=250)
+        Select Operator [SEL_1] (rows=6 width=249)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16"]
-          TableScan [TS_0] (rows=5 width=250)
+          TableScan [TS_0] (rows=6 width=249)
             default@table_change_numeric_group_string_group_floating_string_group,table_change_numeric_group_string_group_floating_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table.q.out
index 82bc8a4..06d2372 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table.q.out
@@ -70,9 +70,9 @@ Stage-0
     Stage-1
       Map 1 vectorized, llap
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=5 width=20)
+        Select Operator [SEL_3] (rows=6 width=20)
           Output:["_col0","_col1","_col2"]
-          TableScan [TS_0] (rows=5 width=20)
+          TableScan [TS_0] (rows=6 width=20)
             default@table_add_int_permute_select,table_add_int_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: select insert_num,a,b from table_add_int_permute_select
@@ -183,9 +183,9 @@ Stage-0
     Stage-1
       Map 1 vectorized, llap
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=5 width=20)
+        Select Operator [SEL_3] (rows=6 width=21)
           Output:["_col0","_col1","_col2"]
-          TableScan [TS_0] (rows=5 width=20)
+          TableScan [TS_0] (rows=6 width=21)
             default@table_add_int_string_permute_select,table_add_int_string_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: select insert_num,a,b from table_add_int_string_permute_select
@@ -358,9 +358,9 @@ Stage-0
     Stage-1
       Map 1 vectorized, llap
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=5 width=90)
+        Select Operator [SEL_3] (rows=6 width=80)
           Output:["_col0","_col1","_col2","_col3","_col4"]
-          TableScan [TS_0] (rows=5 width=90)
+          TableScan [TS_0] (rows=6 width=80)
             default@table_change_string_group_double,table_change_string_group_double,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double
@@ -602,9 +602,9 @@ Stage-0
     Stage-1
       Map 1 vectorized, llap
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=5 width=151)
+        Select Operator [SEL_3] (rows=6 width=178)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21"]
-          TableScan [TS_0] (rows=5 width=151)
+          TableScan [TS_0] (rows=6 width=178)
             default@table_change_numeric_group_string_group_multi_ints_string_group,table_change_numeric_group_string_group_multi_ints_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","c16","c17","c18","c19","c20","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group
@@ -757,9 +757,9 @@ Stage-0
     Stage-1
       Map 1 vectorized, llap
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=5 width=250)
+        Select Operator [SEL_3] (rows=6 width=249)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16"]
-          TableScan [TS_0] (rows=5 width=250)
+          TableScan [TS_0] (rows=6 width=249)
             default@table_change_numeric_group_string_group_floating_string_group,table_change_numeric_group_string_group_floating_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_table.q.out
index 82bc8a4..06d2372 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_table.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_table.q.out
@@ -70,9 +70,9 @@ Stage-0
     Stage-1
       Map 1 vectorized, llap
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=5 width=20)
+        Select Operator [SEL_3] (rows=6 width=20)
           Output:["_col0","_col1","_col2"]
-          TableScan [TS_0] (rows=5 width=20)
+          TableScan [TS_0] (rows=6 width=20)
             default@table_add_int_permute_select,table_add_int_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: select insert_num,a,b from table_add_int_permute_select
@@ -183,9 +183,9 @@ Stage-0
     Stage-1
       Map 1 vectorized, llap
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=5 width=20)
+        Select Operator [SEL_3] (rows=6 width=21)
           Output:["_col0","_col1","_col2"]
-          TableScan [TS_0] (rows=5 width=20)
+          TableScan [TS_0] (rows=6 width=21)
             default@table_add_int_string_permute_select,table_add_int_string_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: select insert_num,a,b from table_add_int_string_permute_select
@@ -358,9 +358,9 @@ Stage-0
     Stage-1
       Map 1 vectorized, llap
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=5 width=90)
+        Select Operator [SEL_3] (rows=6 width=80)
           Output:["_col0","_col1","_col2","_col3","_col4"]
-          TableScan [TS_0] (rows=5 width=90)
+          TableScan [TS_0] (rows=6 width=80)
             default@table_change_string_group_double,table_change_string_group_double,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double
@@ -602,9 +602,9 @@ Stage-0
     Stage-1
       Map 1 vectorized, llap
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=5 width=151)
+        Select Operator [SEL_3] (rows=6 width=178)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21"]
-          TableScan [TS_0] (rows=5 width=151)
+          TableScan [TS_0] (rows=6 width=178)
             default@table_change_numeric_group_string_group_multi_ints_string_group,table_change_numeric_group_string_group_multi_ints_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","c16","c17","c18","c19","c20","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group
@@ -757,9 +757,9 @@ Stage-0
     Stage-1
       Map 1 vectorized, llap
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=5 width=250)
+        Select Operator [SEL_3] (rows=6 width=249)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16"]
-          TableScan [TS_0] (rows=5 width=250)
+          TableScan [TS_0] (rows=6 width=249)
             default@table_change_numeric_group_string_group_floating_string_group,table_change_numeric_group_string_group_floating_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/results/clientpositive/partition_coltype_literals.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/partition_coltype_literals.q.out b/ql/src/test/results/clientpositive/partition_coltype_literals.q.out
index 61d6423..fad937d 100644
--- a/ql/src/test/results/clientpositive/partition_coltype_literals.q.out
+++ b/ql/src/test/results/clientpositive/partition_coltype_literals.q.out
@@ -355,7 +355,7 @@ Database:           	default
 Table:              	partcoltypenum      	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"value\":\"true\"}}
 #### A masked pattern was here ####
 	numFiles            	2                   
 	numRows             	30                  
@@ -404,7 +404,7 @@ Database:           	default
 Table:              	partcoltypenum      	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"value\":\"true\"}}
 #### A masked pattern was here ####
 	numFiles            	2                   
 	numRows             	30                  

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/results/clientpositive/show_tblproperties.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/show_tblproperties.q.out b/ql/src/test/results/clientpositive/show_tblproperties.q.out
index 9377beb..005de89 100644
--- a/ql/src/test/results/clientpositive/show_tblproperties.q.out
+++ b/ql/src/test/results/clientpositive/show_tblproperties.q.out
@@ -36,6 +36,7 @@ PREHOOK: query: show tblproperties tmpfoo
 PREHOOK: type: SHOW_TBLPROPERTIES
 POSTHOOK: query: show tblproperties tmpfoo
 POSTHOOK: type: SHOW_TBLPROPERTIES
+COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true"}
 bar	bar value
 #### A masked pattern was here ####
 numFiles	0
@@ -53,6 +54,7 @@ PREHOOK: query: show tblproperties default.tmpfoo
 PREHOOK: type: SHOW_TBLPROPERTIES
 POSTHOOK: query: show tblproperties default.tmpfoo
 POSTHOOK: type: SHOW_TBLPROPERTIES
+COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true"}
 bar	bar value
 #### A masked pattern was here ####
 numFiles	0
@@ -106,6 +108,7 @@ PREHOOK: query: show tblproperties default.tmpfoo
 PREHOOK: type: SHOW_TBLPROPERTIES
 POSTHOOK: query: show tblproperties default.tmpfoo
 POSTHOOK: type: SHOW_TBLPROPERTIES
+COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true"}
 bar	bar value
 #### A masked pattern was here ####
 numFiles	0
@@ -123,6 +126,7 @@ PREHOOK: query: show tblproperties tmpfoo
 PREHOOK: type: SHOW_TBLPROPERTIES
 POSTHOOK: query: show tblproperties tmpfoo
 POSTHOOK: type: SHOW_TBLPROPERTIES
+COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true"}
 bar	bar value1
 #### A masked pattern was here ####
 numFiles	0
@@ -146,6 +150,7 @@ PREHOOK: query: show tblproperties db1.tmpfoo
 PREHOOK: type: SHOW_TBLPROPERTIES
 POSTHOOK: query: show tblproperties db1.tmpfoo
 POSTHOOK: type: SHOW_TBLPROPERTIES
+COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true"}
 bar	bar value1
 #### A masked pattern was here ####
 numFiles	0

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/results/clientpositive/stats_invalidation.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/stats_invalidation.q.out b/ql/src/test/results/clientpositive/stats_invalidation.q.out
index d822f4f..a0e7663 100644
--- a/ql/src/test/results/clientpositive/stats_invalidation.q.out
+++ b/ql/src/test/results/clientpositive/stats_invalidation.q.out
@@ -88,6 +88,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 #### A masked pattern was here ####
 	numFiles            	1                   
 	numRows             	500                 

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/results/clientpositive/unset_table_view_property.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/unset_table_view_property.q.out b/ql/src/test/results/clientpositive/unset_table_view_property.q.out
index f9c9697..d28d800 100644
--- a/ql/src/test/results/clientpositive/unset_table_view_property.q.out
+++ b/ql/src/test/results/clientpositive/unset_table_view_property.q.out
@@ -34,6 +34,7 @@ PREHOOK: query: SHOW TBLPROPERTIES vt.testTable
 PREHOOK: type: SHOW_TBLPROPERTIES
 POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
 POSTHOOK: type: SHOW_TBLPROPERTIES
+COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true"}
 a	1
 c	3
 #### A masked pattern was here ####
@@ -54,6 +55,7 @@ PREHOOK: query: SHOW TBLPROPERTIES vt.testTable
 PREHOOK: type: SHOW_TBLPROPERTIES
 POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
 POSTHOOK: type: SHOW_TBLPROPERTIES
+COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true"}
 #### A masked pattern was here ####
 numFiles	0
 numRows	0
@@ -72,6 +74,7 @@ PREHOOK: query: SHOW TBLPROPERTIES vt.testTable
 PREHOOK: type: SHOW_TBLPROPERTIES
 POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
 POSTHOOK: type: SHOW_TBLPROPERTIES
+COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true"}
 a	1
 c	3
 d	4
@@ -93,6 +96,7 @@ PREHOOK: query: SHOW TBLPROPERTIES vt.testTable
 PREHOOK: type: SHOW_TBLPROPERTIES
 POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
 POSTHOOK: type: SHOW_TBLPROPERTIES
+COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true"}
 c	3
 #### A masked pattern was here ####
 numFiles	0
@@ -112,6 +116,7 @@ PREHOOK: query: SHOW TBLPROPERTIES vt.testTable
 PREHOOK: type: SHOW_TBLPROPERTIES
 POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
 POSTHOOK: type: SHOW_TBLPROPERTIES
+COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true"}
 #### A masked pattern was here ####
 numFiles	0
 numRows	0
@@ -130,6 +135,7 @@ PREHOOK: query: SHOW TBLPROPERTIES vt.testTable
 PREHOOK: type: SHOW_TBLPROPERTIES
 POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
 POSTHOOK: type: SHOW_TBLPROPERTIES
+COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true"}
 a	1
 b	2
 c	3
@@ -152,6 +158,7 @@ PREHOOK: query: SHOW TBLPROPERTIES vt.testTable
 PREHOOK: type: SHOW_TBLPROPERTIES
 POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
 POSTHOOK: type: SHOW_TBLPROPERTIES
+COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true"}
 a	1
 c	3
 #### A masked pattern was here ####
@@ -172,6 +179,7 @@ PREHOOK: query: SHOW TBLPROPERTIES vt.testTable
 PREHOOK: type: SHOW_TBLPROPERTIES
 POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
 POSTHOOK: type: SHOW_TBLPROPERTIES
+COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true"}
 a	1
 #### A masked pattern was here ####
 numFiles	0


[2/2] hive git commit: HIVE-15653: Some ALTER TABLE commands drop table stats (Chaoyu Tang, reviewed by PengCheng Xiong)

Posted by ct...@apache.org.
HIVE-15653: Some ALTER TABLE commands drop table stats (Chaoyu Tang, reviewed by PengCheng Xiong)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5468207e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5468207e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5468207e

Branch: refs/heads/master
Commit: 5468207e430b8b3fad6d65f2fcd80d1042cf8327
Parents: 77dfbe0
Author: Chaoyu Tang <ct...@cloudera.com>
Authored: Tue Jan 31 09:46:13 2017 -0500
Committer: Chaoyu Tang <ct...@cloudera.com>
Committed: Tue Jan 31 09:46:13 2017 -0500

----------------------------------------------------------------------
 .../hadoop/hive/common/StatsSetupConst.java     |  24 +
 .../hadoop/hive/metastore/HiveAlterHandler.java |   7 +
 .../hadoop/hive/metastore/MetaStoreUtils.java   |  13 +-
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java |  29 +-
 .../hive/ql/parse/DDLSemanticAnalyzer.java      |  44 +-
 .../clientpositive/alter_table_stats_status.q   |  48 ++
 .../clientnegative/unset_table_property.q.out   |   1 +
 .../clientpositive/alter_file_format.q.out      |   6 +
 .../clientpositive/alter_skewed_table.q.out     |   3 +
 .../clientpositive/alter_table_not_sorted.q.out |   1 +
 .../alter_table_stats_status.q.out              | 572 +++++++++++++++++++
 .../columnStatsUpdateForStatsOptimizer_2.q.out  |  37 +-
 .../create_alter_list_bucketing_table1.q.out    |   5 +
 .../results/clientpositive/create_like.q.out    |   1 +
 .../describe_comment_nonascii.q.out             |   1 +
 .../llap/orc_predicate_pushdown.q.out           | 112 ++--
 .../llap/schema_evol_orc_nonvec_table.q.out     |  20 +-
 .../llap/schema_evol_orc_vec_table.q.out        |  20 +-
 .../llap/schema_evol_text_nonvec_table.q.out    |  20 +-
 .../llap/schema_evol_text_vec_table.q.out       |  20 +-
 .../llap/schema_evol_text_vecrow_table.q.out    |  20 +-
 .../partition_coltype_literals.q.out            |   4 +-
 .../clientpositive/show_tblproperties.q.out     |   5 +
 .../clientpositive/stats_invalidation.q.out     |   1 +
 .../unset_table_view_property.q.out             |   8 +
 25 files changed, 853 insertions(+), 169 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java b/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
index 25c7508..c78f005 100644
--- a/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
+++ b/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
@@ -232,6 +232,30 @@ public class StatsSetupConst {
     }
   }
 
+  public static void removeColumnStatsState(Map<String, String> params, List<String> colNames) {
+    String statsAcc;
+    if (params != null && (statsAcc = params.get(COLUMN_STATS_ACCURATE)) != null) {
+      // statsAcc may not be in JSON format, in which case parsing throws an exception
+      JSONObject stats = parseStatsAcc(statsAcc);
+      try {
+        JSONObject colStats = stats.getJSONObject(COLUMN_STATS);
+        for (String colName : colNames) {
+          if (colStats.has(colName)) {
+            colStats.remove(colName);
+          }
+        }
+        if (colStats.length() != 0) {
+          stats.put(COLUMN_STATS, colStats);
+        } else {
+          stats.remove(COLUMN_STATS);
+        }
+        params.put(COLUMN_STATS_ACCURATE, stats.toString());
+      } catch (JSONException e) {
+        LOG.debug(e.getMessage());
+      }
+    }
+  }
+
   public static void setBasicStatsStateForCreateTable(Map<String, String> params, String setting) {
     if (TRUE.equals(setting)) {
       for (String stat : StatsSetupConst.supportedStats) {
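
For illustration, a minimal standalone sketch of the pruning logic that the new removeColumnStatsState method performs. This is not Hive's StatsSetupConst: it assumes org.json on the classpath, simplifies the error handling, and hard-codes the parameter names. The input value matches the shape seen in the test output (e.g. {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}), and dropping the stats for column "key" yields the value shown in partition_coltype_literals.q.out earlier in this message.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.json.JSONException;
import org.json.JSONObject;

public class ColumnStatsPruneSketch {
  static final String COLUMN_STATS_ACCURATE = "COLUMN_STATS_ACCURATE";
  static final String COLUMN_STATS = "COLUMN_STATS";

  // Drop the per-column accuracy flags of deleted/changed columns, keeping BASIC_STATS.
  static void removeColumnStatsState(Map<String, String> params, List<String> deletedCols) {
    String statsAcc = (params == null) ? null : params.get(COLUMN_STATS_ACCURATE);
    if (statsAcc == null) {
      return;
    }
    try {
      JSONObject stats = new JSONObject(statsAcc);
      JSONObject colStats = stats.getJSONObject(COLUMN_STATS);
      for (String col : deletedCols) {
        colStats.remove(col);                 // removing a missing key is a no-op
      }
      if (colStats.length() == 0) {
        stats.remove(COLUMN_STATS);           // no accurate column stats remain
      } else {
        stats.put(COLUMN_STATS, colStats);
      }
      params.put(COLUMN_STATS_ACCURATE, stats.toString());
    } catch (JSONException e) {
      // value was not JSON or had no COLUMN_STATS object: leave the parameter untouched
    }
  }

  public static void main(String[] args) {
    Map<String, String> params = new HashMap<>();
    params.put(COLUMN_STATS_ACCURATE,
        "{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}");
    removeColumnStatsState(params, Arrays.asList("key"));
    // prints the same value minus the "key" entry, e.g.
    // {"BASIC_STATS":"true","COLUMN_STATS":{"value":"true"}} (key order may vary)
    System.out.println(params.get(COLUMN_STATS_ACCURATE));
  }
}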

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
index 64d9fc1..bae39ac 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
@@ -729,6 +729,7 @@ public class HiveAlterHandler implements AlterHandler {
       assert (partsColStats.size() <= 1);
       for (ColumnStatistics partColStats : partsColStats) { //actually only at most one loop
         List<ColumnStatisticsObj> statsObjs = partColStats.getStatsObj();
+        List<String> deletedCols = new ArrayList<String>();
         for (ColumnStatisticsObj statsObj : statsObjs) {
           boolean found =false;
           for (FieldSchema newCol : newCols) {
@@ -741,8 +742,10 @@ public class HiveAlterHandler implements AlterHandler {
           if (!found) {
             msdb.deletePartitionColumnStatistics(dbName, tableName, oldPartName, partVals,
                 statsObj.getColName());
+            deletedCols.add(statsObj.getColName());
           }
         }
+        StatsSetupConst.removeColumnStatsState(newPart.getParameters(), deletedCols);
       }
     } catch (NoSuchObjectException nsoe) {
       LOG.debug("Could not find db entry." + nsoe);
@@ -827,6 +830,7 @@ public class HiveAlterHandler implements AlterHandler {
           } else {
             List<ColumnStatisticsObj> statsObjs = colStats.getStatsObj();
             if (statsObjs != null) {
+              List<String> deletedCols = new ArrayList<String>();
               for (ColumnStatisticsObj statsObj : statsObjs) {
                 boolean found = false;
                 for (FieldSchema newCol : newCols) {
@@ -841,11 +845,14 @@ public class HiveAlterHandler implements AlterHandler {
                   if (!newDbName.equals(dbName) || !newTableName.equals(tableName)) {
                     msdb.deleteTableColumnStatistics(dbName, tableName, statsObj.getColName());
                     newStatsObjs.add(statsObj);
+                    deletedCols.add(statsObj.getColName());
                   }
                 } else {
                   msdb.deleteTableColumnStatistics(dbName, tableName, statsObj.getColName());
+                  deletedCols.add(statsObj.getColName());
                 }
               }
+              StatsSetupConst.removeColumnStatsState(newTable.getParameters(), deletedCols);
             }
           }
         }

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index b21b9ed..9754466 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -292,11 +292,14 @@ public class MetaStoreUtils {
       return true;
     }
 
-    if (environmentContext != null
-        && environmentContext.isSetProperties()
-        && StatsSetupConst.TASK.equals(environmentContext.getProperties().get(
-            StatsSetupConst.STATS_GENERATED))) {
-      return true;
+    if (environmentContext != null && environmentContext.isSetProperties()) {
+      String statsType = environmentContext.getProperties().get(StatsSetupConst.STATS_GENERATED);
+      // whether STATS_GENERATED is USER or TASK, the stats need to be recalculated:
+      // USER: alter table .. update statistics
+      // TASK: a SQL operation that collects and computes stats
+      if (StatsSetupConst.TASK.equals(statsType) || StatsSetupConst.USER.equals(statsType)) {
+        return true;
+      }
     }
 
     // requires to calculate stats if new and old have different fast stats
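
The gist of that MetaStoreUtils change, as a hedged standalone sketch: stats are (re)computed whether the operation was marked as task-generated (e.g. an INSERT) or user-generated (ALTER TABLE .. UPDATE STATISTICS). Plain string constants and a Map stand in for Hive's StatsSetupConst and EnvironmentContext; the class and method names are illustrative only.

import java.util.Collections;
import java.util.Map;

public class StatsGeneratedCheckSketch {
  // Hypothetical stand-ins for StatsSetupConst.STATS_GENERATED / TASK / USER.
  static final String STATS_GENERATED = "STATS_GENERATED";
  static final String TASK = "TASK";
  static final String USER = "USER";

  // Before the patch only TASK forced a stats update; USER now does as well.
  static boolean statsUpdateRequested(Map<String, String> envProps) {
    if (envProps == null) {
      return false;
    }
    String statsType = envProps.get(STATS_GENERATED);
    return TASK.equals(statsType) || USER.equals(statsType);
  }

  public static void main(String[] args) {
    // ALTER TABLE .. UPDATE STATISTICS marks the context with USER
    System.out.println(statsUpdateRequested(
        Collections.singletonMap(STATS_GENERATED, USER)));  // true
  }
}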

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 9511b46..ad701f6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -1188,10 +1188,12 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
       throws HiveException {
 
     Table tbl = db.getTable(touchDesc.getTableName());
+    EnvironmentContext environmentContext = new EnvironmentContext();
+    environmentContext.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
 
     if (touchDesc.getPartSpec() == null) {
       try {
-        db.alterTable(touchDesc.getTableName(), tbl, null);
+        db.alterTable(touchDesc.getTableName(), tbl, environmentContext);
       } catch (InvalidOperationException e) {
         throw new HiveException("Uable to update table");
       }
@@ -1203,7 +1205,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
         throw new HiveException("Specified partition does not exist");
       }
       try {
-        db.alterPartition(touchDesc.getTableName(), part, null);
+        db.alterPartition(touchDesc.getTableName(), part, environmentContext);
       } catch (InvalidOperationException e) {
         throw new HiveException(e);
       }
@@ -3510,6 +3512,16 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
 
   private int alterTableOrSinglePartition(AlterTableDesc alterTbl, Table tbl, Partition part)
       throws HiveException {
+    EnvironmentContext environmentContext = alterTbl.getEnvironmentContext();
+    if (environmentContext == null) {
+      environmentContext = new EnvironmentContext();
+      alterTbl.setEnvironmentContext(environmentContext);
+    }
+    // by default, do not update stats for alter table/partition operations
+    if (environmentContext.getProperties() == null ||
+        environmentContext.getProperties().get(StatsSetupConst.DO_NOT_UPDATE_STATS) == null) {
+      environmentContext.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
+    }
 
     if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.RENAME) {
       tbl.setDbName(Utilities.getDatabaseName(alterTbl.getNewName()));
@@ -3647,6 +3659,10 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
       }
       sd.setCols(alterTbl.getNewCols());
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDPROPS) {
+      if (StatsSetupConst.USER.equals(environmentContext.getProperties()
+              .get(StatsSetupConst.STATS_GENERATED))) {
+        environmentContext.getProperties().remove(StatsSetupConst.DO_NOT_UPDATE_STATS);
+      }
       if (part != null) {
         part.getTPartition().getParameters().putAll(alterTbl.getProps());
       } else {
@@ -3654,6 +3670,11 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
       }
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.DROPPROPS) {
       Iterator<String> keyItr = alterTbl.getProps().keySet().iterator();
+      if (StatsSetupConst.USER.equals(environmentContext.getProperties()
+          .get(StatsSetupConst.STATS_GENERATED))) {
+        // dropping a stats parameter should trigger a stats update automatically
+        environmentContext.getProperties().remove(StatsSetupConst.DO_NOT_UPDATE_STATS);
+      }
       while (keyItr.hasNext()) {
         if (part != null) {
           part.getTPartition().getParameters().remove(keyItr.next());
@@ -3747,6 +3768,8 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
       } catch (URISyntaxException e) {
         throw new HiveException(e);
       }
+      environmentContext.getProperties().remove(StatsSetupConst.DO_NOT_UPDATE_STATS);
+
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDSKEWEDBY) {
       // Validation's been done at compile time. no validation is needed here.
       List<String> skewedColNames = null;
@@ -3792,6 +3815,8 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
           throw new HiveException(e);
         }
       }
+
+      environmentContext.getProperties().remove(StatsSetupConst.DO_NOT_UPDATE_STATS);
     } else if (alterTbl.getOp() == AlterTableTypes.ALTERBUCKETNUM) {
       if (part != null) {
         if (part.getBucketCount() == alterTbl.getNumberBuckets()) {
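
Taken together, the DDLTask changes above default every ALTER operation to "do not update stats" and lift that flag only when the user explicitly supplies stats (ADDPROPS/DROPPROPS with STATS_GENERATED=USER) or when the underlying data really changes (set location, skewed-by). A hedged standalone sketch of that gating follows; a plain Map stands in for EnvironmentContext properties, and the enum and constant names are illustrative, not Hive's.

import java.util.HashMap;
import java.util.Map;

public class AlterStatsGateSketch {
  // Hypothetical stand-ins for the constants used in the diff.
  static final String DO_NOT_UPDATE_STATS = "DO_NOT_UPDATE_STATS";
  static final String STATS_GENERATED = "STATS_GENERATED";
  static final String USER = "USER";
  static final String TRUE = "true";

  enum AlterOp { RENAME, ADDCOLS, ADDPROPS, DROPPROPS, SET_LOCATION, ADDSKEWEDBY }

  // Build the environment-context properties for an ALTER operation.
  static Map<String, String> envPropsFor(AlterOp op, boolean userSuppliedStats) {
    Map<String, String> props = new HashMap<>();
    props.put(DO_NOT_UPDATE_STATS, TRUE);        // default: preserve existing stats
    if (userSuppliedStats) {
      props.put(STATS_GENERATED, USER);
    }
    switch (op) {
      case ADDPROPS:
      case DROPPROPS:
        if (USER.equals(props.get(STATS_GENERATED))) {
          props.remove(DO_NOT_UPDATE_STATS);     // user explicitly changed stats values
        }
        break;
      case SET_LOCATION:
      case ADDSKEWEDBY:
        props.remove(DO_NOT_UPDATE_STATS);       // data location/layout changed; stats are stale
        break;
      default:
        break;                                   // rename, add columns, etc. keep stats intact
    }
    return props;
  }

  public static void main(String[] args) {
    System.out.println(envPropsFor(AlterOp.RENAME, false));        // {DO_NOT_UPDATE_STATS=true}
    System.out.println(envPropsFor(AlterOp.SET_LOCATION, false));  // {}
  }
}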

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 0f472e7..ba54d4e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -1361,30 +1361,34 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
     HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(0))
         .getChild(0));
     EnvironmentContext environmentContext = null;
-    if (queryState.getCommandType()
-        .equals(HiveOperation.ALTERTABLE_UPDATETABLESTATS.getOperationName())
-        || queryState.getCommandType()
-            .equals(HiveOperation.ALTERTABLE_UPDATEPARTSTATS.getOperationName())) {
-      // we need to check if the properties are valid, especially for stats.
-      boolean changeStatsSucceeded = false;
-      for (Entry<String, String> entry : mapProp.entrySet()) {
-        // we make sure that we do not change anything if there is anything
-        // wrong.
-        if (entry.getKey().equals(StatsSetupConst.ROW_COUNT)
-            || entry.getKey().equals(StatsSetupConst.RAW_DATA_SIZE)) {
-          try {
-            Long.parseLong(entry.getValue());
-            changeStatsSucceeded = true;
-          } catch (Exception e) {
-            throw new SemanticException("AlterTable " + entry.getKey() + " failed with value "
-                + entry.getValue());
-          }
-        } else {
+    // we need to check that the properties are valid, especially for stats.
+    // They might be changed via alter table .. update statistics or
+    // alter table .. set tblproperties. A property other than row_count
+    // or raw_data_size cannot be changed through update statistics.
+    boolean changeStatsSucceeded = false;
+    for (Entry<String, String> entry : mapProp.entrySet()) {
+      // we make sure that we do not change anything if there is anything
+      // wrong.
+      if (entry.getKey().equals(StatsSetupConst.ROW_COUNT)
+          || entry.getKey().equals(StatsSetupConst.RAW_DATA_SIZE)) {
+        try {
+          Long.parseLong(entry.getValue());
+          changeStatsSucceeded = true;
+        } catch (Exception e) {
+          throw new SemanticException("AlterTable " + entry.getKey() + " failed with value "
+              + entry.getValue());
+        }
+      } else {
+        if (queryState.getCommandType()
+            .equals(HiveOperation.ALTERTABLE_UPDATETABLESTATS.getOperationName())
+            || queryState.getCommandType()
+                .equals(HiveOperation.ALTERTABLE_UPDATEPARTSTATS.getOperationName())) {
           throw new SemanticException("AlterTable UpdateStats " + entry.getKey()
-              + " failed because the only valid keys are" + StatsSetupConst.ROW_COUNT + " and "
+              + " failed because the only valid keys are " + StatsSetupConst.ROW_COUNT + " and "
               + StatsSetupConst.RAW_DATA_SIZE);
         }
       }
+
       if (changeStatsSucceeded) {
         environmentContext = new EnvironmentContext();
         environmentContext.putToProperties(StatsSetupConst.STATS_GENERATED, StatsSetupConst.USER);
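
A hedged standalone sketch of the validation flow above: numRows and rawDataSize values must parse as longs, any other key is rejected only for the UPDATE STATISTICS commands (SET TBLPROPERTIES may still set arbitrary keys), and a successful stats change is what leads to STATS_GENERATED being set to USER. The class name and the use of IllegalArgumentException instead of SemanticException are illustrative choices, not Hive's.

import java.util.HashMap;
import java.util.Map;

public class UpdateStatsValidationSketch {
  static final String ROW_COUNT = "numRows";
  static final String RAW_DATA_SIZE = "rawDataSize";

  // Returns true if at least one stats property was validated and changed.
  static boolean validateStatsProps(Map<String, String> props, boolean isUpdateStatsCommand) {
    boolean changeStatsSucceeded = false;
    for (Map.Entry<String, String> entry : props.entrySet()) {
      String key = entry.getKey();
      if (ROW_COUNT.equals(key) || RAW_DATA_SIZE.equals(key)) {
        try {
          Long.parseLong(entry.getValue());      // stats values must be numeric
          changeStatsSucceeded = true;
        } catch (NumberFormatException e) {
          throw new IllegalArgumentException(
              "AlterTable " + key + " failed with value " + entry.getValue());
        }
      } else if (isUpdateStatsCommand) {
        // UPDATE STATISTICS accepts only numRows and rawDataSize
        throw new IllegalArgumentException("AlterTable UpdateStats " + key
            + " failed because the only valid keys are " + ROW_COUNT + " and " + RAW_DATA_SIZE);
      }
    }
    return changeStatsSucceeded;  // caller would then tag the context with STATS_GENERATED=USER
  }

  public static void main(String[] args) {
    Map<String, String> props = new HashMap<>();
    props.put(ROW_COUNT, "1000");
    System.out.println(validateStatsProps(props, true));  // true
  }
}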

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/queries/clientpositive/alter_table_stats_status.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/alter_table_stats_status.q b/ql/src/test/queries/clientpositive/alter_table_stats_status.q
new file mode 100644
index 0000000..8e07b81
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/alter_table_stats_status.q
@@ -0,0 +1,48 @@
+create database statsdb;
+use statsdb;
+create table srctable like default.src;
+load data local inpath '../../data/files/kv1.txt' overwrite into table srctable;
+
+analyze table srctable compute statistics;
+describe formatted srctable;
+
+alter table srctable touch;
+alter table srctable rename to statstable;
+
+alter table statstable add columns (newcol string);
+alter table statstable change key key string;
+alter table statstable set tblproperties('testtblstats'='unchange');
+describe formatted statstable;
+
+alter table statstable update statistics set ('numRows' = '1000');
+describe formatted statstable;
+
+analyze table statstable compute statistics;
+describe formatted statstable;
+alter table statstable set location '${system:test.tmp.dir}/newdir';
+describe formatted statstable;
+
+drop table statstable;
+
+create table srcpart like default.srcpart;
+load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='11');
+load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='12');
+
+analyze table srcpart partition (ds='2008-04-08', hr='11') compute statistics;
+describe formatted srcpart partition (ds='2008-04-08', hr='11');
+
+alter table srcpart touch;
+alter table srcpart partition (ds='2008-04-08', hr='11') rename to partition (ds='2017-01-19', hr='11');
+alter table srcpart partition (ds='2017-01-19', hr='11') add columns (newcol string);
+alter table srcpart partition (ds='2017-01-19', hr='11') change key key string;
+alter table srcpart set tblproperties('testpartstats'='unchange');
+describe formatted srcpart partition (ds='2017-01-19', hr='11');
+
+alter table srcpart partition (ds='2017-01-19', hr='11') update statistics set ('numRows' = '1000');
+describe formatted srcpart partition (ds='2017-01-19', hr='11');
+
+analyze table srcpart partition (ds='2017-01-19', hr='11') compute statistics;
+describe formatted srcpart partition (ds='2017-01-19', hr='11');
+
+drop table srcpart;
+

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/results/clientnegative/unset_table_property.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/unset_table_property.q.out b/ql/src/test/results/clientnegative/unset_table_property.q.out
index 0705b92..4aedfc5 100644
--- a/ql/src/test/results/clientnegative/unset_table_property.q.out
+++ b/ql/src/test/results/clientnegative/unset_table_property.q.out
@@ -18,6 +18,7 @@ PREHOOK: query: SHOW TBLPROPERTIES testTable
 PREHOOK: type: SHOW_TBLPROPERTIES
 POSTHOOK: query: SHOW TBLPROPERTIES testTable
 POSTHOOK: type: SHOW_TBLPROPERTIES
+COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true"}
 a	1
 c	3
 #### A masked pattern was here ####

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/results/clientpositive/alter_file_format.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/alter_file_format.q.out b/ql/src/test/results/clientpositive/alter_file_format.q.out
index a69b423..ca569be 100644
--- a/ql/src/test/results/clientpositive/alter_file_format.q.out
+++ b/ql/src/test/results/clientpositive/alter_file_format.q.out
@@ -67,6 +67,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
 	numRows             	0                   
@@ -110,6 +111,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
 	numRows             	0                   
@@ -153,6 +155,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
 	numRows             	0                   
@@ -196,6 +199,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
 	numRows             	0                   
@@ -239,6 +243,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
 	numRows             	0                   
@@ -282,6 +287,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
 	numRows             	0                   

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/results/clientpositive/alter_skewed_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/alter_skewed_table.q.out b/ql/src/test/results/clientpositive/alter_skewed_table.q.out
index 0f60ba3..fefef4c 100644
--- a/ql/src/test/results/clientpositive/alter_skewed_table.q.out
+++ b/ql/src/test/results/clientpositive/alter_skewed_table.q.out
@@ -67,6 +67,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
 	numRows             	0                   
@@ -169,6 +170,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
 	numRows             	0                   
@@ -267,6 +269,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
 	numRows             	0                   

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/results/clientpositive/alter_table_not_sorted.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/alter_table_not_sorted.q.out b/ql/src/test/results/clientpositive/alter_table_not_sorted.q.out
index 566b804..5afb7fa 100644
--- a/ql/src/test/results/clientpositive/alter_table_not_sorted.q.out
+++ b/ql/src/test/results/clientpositive/alter_table_not_sorted.q.out
@@ -68,6 +68,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 	SORTBUCKETCOLSPREFIX	TRUE                
 #### A masked pattern was here ####
 	numFiles            	0                   

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/results/clientpositive/alter_table_stats_status.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/alter_table_stats_status.q.out b/ql/src/test/results/clientpositive/alter_table_stats_status.q.out
new file mode 100644
index 0000000..3404f88
--- /dev/null
+++ b/ql/src/test/results/clientpositive/alter_table_stats_status.q.out
@@ -0,0 +1,572 @@
+PREHOOK: query: create database statsdb
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:statsdb
+POSTHOOK: query: create database statsdb
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:statsdb
+PREHOOK: query: use statsdb
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:statsdb
+POSTHOOK: query: use statsdb
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:statsdb
+PREHOOK: query: create table srctable like default.src
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:statsdb
+PREHOOK: Output: statsdb@srctable
+POSTHOOK: query: create table srctable like default.src
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:statsdb
+POSTHOOK: Output: statsdb@srctable
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table srctable
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: statsdb@srctable
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table srctable
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: statsdb@srctable
+PREHOOK: query: analyze table srctable compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: statsdb@srctable
+PREHOOK: Output: statsdb@srctable
+POSTHOOK: query: analyze table srctable compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: statsdb@srctable
+POSTHOOK: Output: statsdb@srctable
+PREHOOK: query: describe formatted srctable
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: statsdb@srctable
+POSTHOOK: query: describe formatted srctable
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: statsdb@srctable
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	default             
+value               	string              	default             
+	 	 
+# Detailed Table Information	 	 
+Database:           	statsdb             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	1                   
+	numRows             	500                 
+	rawDataSize         	5312                
+	totalSize           	5812                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: alter table srctable touch
+PREHOOK: type: ALTERTABLE_TOUCH
+PREHOOK: Input: statsdb@srctable
+PREHOOK: Output: statsdb@srctable
+POSTHOOK: query: alter table srctable touch
+POSTHOOK: type: ALTERTABLE_TOUCH
+POSTHOOK: Input: statsdb@srctable
+POSTHOOK: Output: statsdb@srctable
+PREHOOK: query: alter table srctable rename to statstable
+PREHOOK: type: ALTERTABLE_RENAME
+PREHOOK: Input: statsdb@srctable
+PREHOOK: Output: statsdb@srctable
+POSTHOOK: query: alter table srctable rename to statstable
+POSTHOOK: type: ALTERTABLE_RENAME
+POSTHOOK: Input: statsdb@srctable
+POSTHOOK: Output: statsdb@srctable
+POSTHOOK: Output: statsdb@statstable
+PREHOOK: query: alter table statstable add columns (newcol string)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: statsdb@statstable
+PREHOOK: Output: statsdb@statstable
+POSTHOOK: query: alter table statstable add columns (newcol string)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: statsdb@statstable
+POSTHOOK: Output: statsdb@statstable
+PREHOOK: query: alter table statstable change key key string
+PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: statsdb@statstable
+PREHOOK: Output: statsdb@statstable
+POSTHOOK: query: alter table statstable change key key string
+POSTHOOK: type: ALTERTABLE_RENAMECOL
+POSTHOOK: Input: statsdb@statstable
+POSTHOOK: Output: statsdb@statstable
+PREHOOK: query: alter table statstable set tblproperties('testtblstats'='unchange')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: statsdb@statstable
+PREHOOK: Output: statsdb@statstable
+POSTHOOK: query: alter table statstable set tblproperties('testtblstats'='unchange')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: statsdb@statstable
+POSTHOOK: Output: statsdb@statstable
+PREHOOK: query: describe formatted statstable
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: statsdb@statstable
+POSTHOOK: query: describe formatted statstable
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: statsdb@statstable
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	default             
+value               	string              	default             
+newcol              	string              	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	statsdb             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+#### A masked pattern was here ####
+	numFiles            	1                   
+	numRows             	500                 
+	rawDataSize         	5312                
+	testtblstats        	unchange            
+	totalSize           	5812                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: alter table statstable update statistics set ('numRows' = '1000')
+PREHOOK: type: ALTERTABLE_UPDATETABLESTATS
+PREHOOK: Input: statsdb@statstable
+PREHOOK: Output: statsdb@statstable
+POSTHOOK: query: alter table statstable update statistics set ('numRows' = '1000')
+POSTHOOK: type: ALTERTABLE_UPDATETABLESTATS
+POSTHOOK: Input: statsdb@statstable
+POSTHOOK: Output: statsdb@statstable
+PREHOOK: query: describe formatted statstable
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: statsdb@statstable
+POSTHOOK: query: describe formatted statstable
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: statsdb@statstable
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	default             
+value               	string              	default             
+newcol              	string              	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	statsdb             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+#### A masked pattern was here ####
+	numFiles            	1                   
+	numRows             	1000                
+	rawDataSize         	5312                
+	testtblstats        	unchange            
+	totalSize           	5812                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: analyze table statstable compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: statsdb@statstable
+PREHOOK: Output: statsdb@statstable
+POSTHOOK: query: analyze table statstable compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: statsdb@statstable
+POSTHOOK: Output: statsdb@statstable
+PREHOOK: query: describe formatted statstable
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: statsdb@statstable
+POSTHOOK: query: describe formatted statstable
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: statsdb@statstable
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	default             
+value               	string              	default             
+newcol              	string              	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	statsdb             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+#### A masked pattern was here ####
+	numFiles            	1                   
+	numRows             	500                 
+	rawDataSize         	5312                
+	testtblstats        	unchange            
+	totalSize           	5812                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+#### A masked pattern was here ####
+PREHOOK: type: ALTERTABLE_LOCATION
+PREHOOK: Input: statsdb@statstable
+#### A masked pattern was here ####
+PREHOOK: Output: statsdb@statstable
+#### A masked pattern was here ####
+POSTHOOK: type: ALTERTABLE_LOCATION
+POSTHOOK: Input: statsdb@statstable
+#### A masked pattern was here ####
+POSTHOOK: Output: statsdb@statstable
+PREHOOK: query: describe formatted statstable
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: statsdb@statstable
+POSTHOOK: query: describe formatted statstable
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: statsdb@statstable
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	default             
+value               	string              	default             
+newcol              	string              	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	statsdb             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+#### A masked pattern was here ####
+	numFiles            	0                   
+	numRows             	500                 
+	rawDataSize         	5312                
+	testtblstats        	unchange            
+	totalSize           	0                   
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: drop table statstable
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: statsdb@statstable
+PREHOOK: Output: statsdb@statstable
+POSTHOOK: query: drop table statstable
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: statsdb@statstable
+POSTHOOK: Output: statsdb@statstable
+PREHOOK: query: create table srcpart like default.srcpart
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:statsdb
+PREHOOK: Output: statsdb@srcpart
+POSTHOOK: query: create table srcpart like default.srcpart
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:statsdb
+POSTHOOK: Output: statsdb@srcpart
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='11')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: statsdb@srcpart
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='11')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: statsdb@srcpart
+POSTHOOK: Output: statsdb@srcpart@ds=2008-04-08/hr=11
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='12')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: statsdb@srcpart
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='12')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: statsdb@srcpart
+POSTHOOK: Output: statsdb@srcpart@ds=2008-04-08/hr=12
+PREHOOK: query: analyze table srcpart partition (ds='2008-04-08', hr='11') compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: statsdb@srcpart
+PREHOOK: Input: statsdb@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Output: statsdb@srcpart
+PREHOOK: Output: statsdb@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: query: analyze table srcpart partition (ds='2008-04-08', hr='11') compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: statsdb@srcpart
+POSTHOOK: Input: statsdb@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Output: statsdb@srcpart
+POSTHOOK: Output: statsdb@srcpart@ds=2008-04-08/hr=11
+PREHOOK: query: describe formatted srcpart partition (ds='2008-04-08', hr='11')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: statsdb@srcpart
+POSTHOOK: query: describe formatted srcpart partition (ds='2008-04-08', hr='11')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: statsdb@srcpart
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	default             
+value               	string              	default             
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 11]    	 
+Database:           	statsdb             	 
+Table:              	srcpart             	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	1                   
+	numRows             	500                 
+	rawDataSize         	5312                
+	totalSize           	5812                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: alter table srcpart touch
+PREHOOK: type: ALTERTABLE_TOUCH
+PREHOOK: Input: statsdb@srcpart
+PREHOOK: Output: statsdb@srcpart
+POSTHOOK: query: alter table srcpart touch
+POSTHOOK: type: ALTERTABLE_TOUCH
+POSTHOOK: Input: statsdb@srcpart
+POSTHOOK: Output: statsdb@srcpart
+PREHOOK: query: alter table srcpart partition (ds='2008-04-08', hr='11') rename to partition (ds='2017-01-19', hr='11')
+PREHOOK: type: ALTERTABLE_RENAMEPART
+PREHOOK: Input: statsdb@srcpart
+PREHOOK: Output: statsdb@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: query: alter table srcpart partition (ds='2008-04-08', hr='11') rename to partition (ds='2017-01-19', hr='11')
+POSTHOOK: type: ALTERTABLE_RENAMEPART
+POSTHOOK: Input: statsdb@srcpart
+POSTHOOK: Input: statsdb@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Output: statsdb@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11
+PREHOOK: query: alter table srcpart partition (ds='2017-01-19', hr='11') add columns (newcol string)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: statsdb@srcpart
+PREHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11
+POSTHOOK: query: alter table srcpart partition (ds='2017-01-19', hr='11') add columns (newcol string)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: statsdb@srcpart
+POSTHOOK: Input: statsdb@srcpart@ds=2017-01-19/hr=11
+POSTHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11
+PREHOOK: query: alter table srcpart partition (ds='2017-01-19', hr='11') change key key string
+PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: statsdb@srcpart
+PREHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11
+POSTHOOK: query: alter table srcpart partition (ds='2017-01-19', hr='11') change key key string
+POSTHOOK: type: ALTERTABLE_RENAMECOL
+POSTHOOK: Input: statsdb@srcpart
+POSTHOOK: Input: statsdb@srcpart@ds=2017-01-19/hr=11
+POSTHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11
+PREHOOK: query: alter table srcpart set tblproperties('testpartstats'='unchange')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: statsdb@srcpart
+PREHOOK: Output: statsdb@srcpart
+POSTHOOK: query: alter table srcpart set tblproperties('testpartstats'='unchange')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: statsdb@srcpart
+POSTHOOK: Output: statsdb@srcpart
+PREHOOK: query: describe formatted srcpart partition (ds='2017-01-19', hr='11')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: statsdb@srcpart
+POSTHOOK: query: describe formatted srcpart partition (ds='2017-01-19', hr='11')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: statsdb@srcpart
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	default             
+value               	string              	default             
+newcol              	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2017-01-19, 11]    	 
+Database:           	statsdb             	 
+Table:              	srcpart             	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+#### A masked pattern was here ####
+	numFiles            	1                   
+	numRows             	500                 
+	rawDataSize         	5312                
+	totalSize           	5812                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: alter table srcpart partition (ds='2017-01-19', hr='11') update statistics set ('numRows' = '1000')
+PREHOOK: type: ALTERTABLE_UPDATEPARTSTATS
+PREHOOK: Input: statsdb@srcpart
+PREHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11
+POSTHOOK: query: alter table srcpart partition (ds='2017-01-19', hr='11') update statistics set ('numRows' = '1000')
+POSTHOOK: type: ALTERTABLE_UPDATEPARTSTATS
+POSTHOOK: Input: statsdb@srcpart
+POSTHOOK: Input: statsdb@srcpart@ds=2017-01-19/hr=11
+POSTHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11
+PREHOOK: query: describe formatted srcpart partition (ds='2017-01-19', hr='11')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: statsdb@srcpart
+POSTHOOK: query: describe formatted srcpart partition (ds='2017-01-19', hr='11')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: statsdb@srcpart
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	default             
+value               	string              	default             
+newcol              	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2017-01-19, 11]    	 
+Database:           	statsdb             	 
+Table:              	srcpart             	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+#### A masked pattern was here ####
+	numFiles            	1                   
+	numRows             	1000                
+	rawDataSize         	5312                
+	totalSize           	5812                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: analyze table srcpart partition (ds='2017-01-19', hr='11') compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: statsdb@srcpart
+PREHOOK: Input: statsdb@srcpart@ds=2017-01-19/hr=11
+PREHOOK: Output: statsdb@srcpart
+PREHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11
+POSTHOOK: query: analyze table srcpart partition (ds='2017-01-19', hr='11') compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: statsdb@srcpart
+POSTHOOK: Input: statsdb@srcpart@ds=2017-01-19/hr=11
+POSTHOOK: Output: statsdb@srcpart
+POSTHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11
+PREHOOK: query: describe formatted srcpart partition (ds='2017-01-19', hr='11')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: statsdb@srcpart
+POSTHOOK: query: describe formatted srcpart partition (ds='2017-01-19', hr='11')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: statsdb@srcpart
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	default             
+value               	string              	default             
+newcol              	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2017-01-19, 11]    	 
+Database:           	statsdb             	 
+Table:              	srcpart             	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+#### A masked pattern was here ####
+	numFiles            	1                   
+	numRows             	500                 
+	rawDataSize         	5312                
+	totalSize           	5812                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: drop table srcpart
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: statsdb@srcpart
+PREHOOK: Output: statsdb@srcpart
+POSTHOOK: query: drop table srcpart
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: statsdb@srcpart
+POSTHOOK: Output: statsdb@srcpart

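For convenience, the partition-level workflow exercised by the q.out above can be replayed by hand with the statements below (a sketch only: it assumes the statsdb database created earlier in this test already exists and that ../../data/files/kv1.txt resolves relative to the test working directory; every statement is taken from the PREHOOK query lines above). Each ALTER should leave the partition's numRows/rawDataSize in place, while the manual UPDATE STATISTICS and the final ANALYZE show the basic stats being overwritten and then recomputed:

    use statsdb;
    create table srcpart like default.srcpart;
    load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='11');
    load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='12');
    analyze table srcpart partition (ds='2008-04-08', hr='11') compute statistics;
    describe formatted srcpart partition (ds='2008-04-08', hr='11');
    -- the ALTERs below should not drop the basic stats gathered above (HIVE-15653)
    alter table srcpart touch;
    alter table srcpart partition (ds='2008-04-08', hr='11') rename to partition (ds='2017-01-19', hr='11');
    alter table srcpart partition (ds='2017-01-19', hr='11') add columns (newcol string);
    alter table srcpart partition (ds='2017-01-19', hr='11') change key key string;
    alter table srcpart set tblproperties('testpartstats'='unchange');
    describe formatted srcpart partition (ds='2017-01-19', hr='11');
    -- a manual stats update drops COLUMN_STATS_ACCURATE and sets numRows=1000;
    -- re-running analyze restores accurate basic stats (numRows back to 500)
    alter table srcpart partition (ds='2017-01-19', hr='11') update statistics set ('numRows' = '1000');
    analyze table srcpart partition (ds='2017-01-19', hr='11') compute statistics;
    describe formatted srcpart partition (ds='2017-01-19', hr='11');
    drop table srcpart;
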
http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out b/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out
index 6c7d0ee..af21343 100644
--- a/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out
+++ b/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out
@@ -168,6 +168,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	2                   
 	numRows             	3                   
@@ -247,44 +248,12 @@ PREHOOK: type: QUERY
 POSTHOOK: query: explain select count(1) from calendar
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
+  Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: calendar
-            Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
-              Group By Operator
-                aggregations: count(1)
-                mode: hash
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  sort order: 
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                  value expressions: _col0 (type: bigint)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: count(VALUE._col0)
-          mode: mergepartial
-          outputColumnNames: _col0
-          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
-      limit: -1
+      limit: 1
       Processor Tree:
         ListSink
 

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out b/ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out
index 6af696a..7d8e5c1 100644
--- a/ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out
+++ b/ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out
@@ -74,6 +74,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
 	numRows             	0                   
@@ -120,6 +121,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
 	numRows             	0                   
@@ -173,6 +175,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
 	numRows             	0                   
@@ -219,6 +222,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
 	numRows             	0                   
@@ -264,6 +268,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
 	numRows             	0                   

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/results/clientpositive/create_like.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_like.q.out b/ql/src/test/results/clientpositive/create_like.q.out
index 58d9879..ff2e752 100644
--- a/ql/src/test/results/clientpositive/create_like.q.out
+++ b/ql/src/test/results/clientpositive/create_like.q.out
@@ -354,6 +354,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 	avro.schema.literal 	{                   
 	                    	  \"namespace\": \"testing.hive.avro.serde\",
 	                    	  \"name\": \"doctors\",

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/results/clientpositive/describe_comment_nonascii.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/describe_comment_nonascii.q.out b/ql/src/test/results/clientpositive/describe_comment_nonascii.q.out
index de1cca9..70f234e 100644
--- a/ql/src/test/results/clientpositive/describe_comment_nonascii.q.out
+++ b/ql/src/test/results/clientpositive/describe_comment_nonascii.q.out
@@ -49,6 +49,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 #### A masked pattern was here ####
 	numFiles            	0                   
 	numRows             	0                   

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/results/clientpositive/llap/orc_predicate_pushdown.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/orc_predicate_pushdown.q.out b/ql/src/test/results/clientpositive/llap/orc_predicate_pushdown.q.out
index 133d6c0..4bba265 100644
--- a/ql/src/test/results/clientpositive/llap/orc_predicate_pushdown.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_predicate_pushdown.q.out
@@ -133,11 +133,11 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: orc_pred
-                  Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: hash(t) (type: int)
                     outputColumnNames: _col0
-                    Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: sum(_col0)
                       mode: hash
@@ -191,11 +191,11 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: orc_pred
-                  Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: hash(t) (type: int)
                     outputColumnNames: _col0
-                    Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: sum(_col0)
                       mode: hash
@@ -321,14 +321,14 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: orc_pred
-                  Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((t < 0) and (UDFToInteger(t) > -2)) (type: boolean)
-                    Statistics: Num rows: 670 Data size: 2680 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 116 Data size: 34409 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: hash(t) (type: int)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 670 Data size: 2680 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 116 Data size: 34409 Basic stats: COMPLETE Column stats: NONE
                       Group By Operator
                         aggregations: sum(_col0)
                         mode: hash
@@ -389,14 +389,14 @@ STAGE PLANS:
                 TableScan
                   alias: orc_pred
                   filterExpr: ((t < 0) and (UDFToInteger(t) > -2)) (type: boolean)
-                  Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((t < 0) and (UDFToInteger(t) > -2)) (type: boolean)
-                    Statistics: Num rows: 670 Data size: 2680 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 116 Data size: 34409 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: hash(t) (type: int)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 670 Data size: 2680 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 116 Data size: 34409 Basic stats: COMPLETE Column stats: NONE
                       Group By Operator
                         aggregations: sum(_col0)
                         mode: hash
@@ -589,18 +589,18 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: orc_pred
-                  Statistics: Num rows: 232 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (s is not null and (s like 'bob%') and (not (t) IN (-1, -2, -3)) and t BETWEEN 25 AND 30) (type: boolean)
-                    Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: t (type: tinyint), s (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: tinyint), _col1 (type: string)
                         sort order: ++
-                        Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE
             Execution mode: llap
             LLAP IO: all inputs
         Reducer 2 
@@ -609,10 +609,10 @@ STAGE PLANS:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: tinyint), KEY.reducesinkkey1 (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -655,18 +655,18 @@ STAGE PLANS:
                 TableScan
                   alias: orc_pred
                   filterExpr: (s is not null and (s like 'bob%') and (not (t) IN (-1, -2, -3)) and t BETWEEN 25 AND 30) (type: boolean)
-                  Statistics: Num rows: 232 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (s is not null and (s like 'bob%') and (not (t) IN (-1, -2, -3)) and t BETWEEN 25 AND 30) (type: boolean)
-                    Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: t (type: tinyint), s (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: tinyint), _col1 (type: string)
                         sort order: ++
-                        Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE
             Execution mode: llap
             LLAP IO: all inputs
         Reducer 2 
@@ -675,10 +675,10 @@ STAGE PLANS:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: tinyint), KEY.reducesinkkey1 (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -786,18 +786,18 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: orc_pred
-                  Statistics: Num rows: 208 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((d >= 10.0) and (d < 12.0) and (s like '%son') and (t > 0) and si BETWEEN 300 AND 400 and (not (s like '%car%'))) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string)
                       outputColumnNames: _col0, _col1, _col2, _col3
-                      Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col3 (type: string)
                         sort order: -
-                        Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                         TopN Hash Memory Usage: 0.1
                         value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double)
             Execution mode: llap
@@ -808,13 +808,13 @@ STAGE PLANS:
               Select Operator
                 expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                 Limit
                   Number of rows: 3
-                  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
-                    Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -865,18 +865,18 @@ STAGE PLANS:
                 TableScan
                   alias: orc_pred
                   filterExpr: ((d >= 10.0) and (d < 12.0) and (s like '%son') and (t > 0) and si BETWEEN 300 AND 400 and (not (s like '%car%'))) (type: boolean)
-                  Statistics: Num rows: 208 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((d >= 10.0) and (d < 12.0) and (s like '%son') and (t > 0) and si BETWEEN 300 AND 400 and (not (s like '%car%'))) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string)
                       outputColumnNames: _col0, _col1, _col2, _col3
-                      Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col3 (type: string)
                         sort order: -
-                        Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                         TopN Hash Memory Usage: 0.1
                         value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double)
             Execution mode: llap
@@ -887,13 +887,13 @@ STAGE PLANS:
               Select Operator
                 expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                 Limit
                   Number of rows: 3
-                  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
-                    Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1010,18 +1010,18 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: orc_pred
-                  Statistics: Num rows: 208 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((t > 10) and (t <> 101) and (d >= 10) and (d < 12.0) and (s like '%son') and (not (s like '%car%')) and (t > 0) and si BETWEEN 300 AND 400) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string)
                       outputColumnNames: _col0, _col1, _col2, _col3
-                      Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col3 (type: string)
                         sort order: -
-                        Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                         TopN Hash Memory Usage: 0.1
                         value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double)
             Execution mode: llap
@@ -1032,14 +1032,14 @@ STAGE PLANS:
               Select Operator
                 expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                 Limit
                   Number of rows: 3
-                  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                   Reduce Output Operator
                     key expressions: _col3 (type: string)
                     sort order: -
-                    Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                     TopN Hash Memory Usage: 0.1
                     value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double)
         Reducer 3 
@@ -1048,13 +1048,13 @@ STAGE PLANS:
               Select Operator
                 expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                 Limit
                   Number of rows: 3
-                  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
-                    Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1110,18 +1110,18 @@ STAGE PLANS:
                 TableScan
                   alias: orc_pred
                   filterExpr: ((t > 10) and (t <> 101) and (d >= 10) and (d < 12.0) and (s like '%son') and (not (s like '%car%')) and (t > 0) and si BETWEEN 300 AND 400) (type: boolean)
-                  Statistics: Num rows: 208 Data size: 24150 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((t > 10) and (t <> 101) and (d >= 10) and (d < 12.0) and (s like '%son') and (not (s like '%car%')) and (t > 0) and si BETWEEN 300 AND 400) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string)
                       outputColumnNames: _col0, _col1, _col2, _col3
-                      Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col3 (type: string)
                         sort order: -
-                        Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                         TopN Hash Memory Usage: 0.1
                         value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double)
             Execution mode: llap
@@ -1132,14 +1132,14 @@ STAGE PLANS:
               Select Operator
                 expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                 Limit
                   Number of rows: 3
-                  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                   Reduce Output Operator
                     key expressions: _col3 (type: string)
                     sort order: -
-                    Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                     TopN Hash Memory Usage: 0.1
                     value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double)
         Reducer 3 
@@ -1148,13 +1148,13 @@ STAGE PLANS:
               Select Operator
                 expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                 Limit
                   Number of rows: 3
-                  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
-                    Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table.q.out
index 5745a7e..7b420e2 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table.q.out
@@ -70,9 +70,9 @@ Stage-0
     Stage-1
       Map 1 llap
       File Output Operator [FS_2]
-        Select Operator [SEL_1] (rows=5 width=99)
+        Select Operator [SEL_1] (rows=6 width=99)
           Output:["_col0","_col1","_col2"]
-          TableScan [TS_0] (rows=5 width=99)
+          TableScan [TS_0] (rows=6 width=99)
             default@table_add_int_permute_select,table_add_int_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: select insert_num,a,b from table_add_int_permute_select
@@ -183,9 +183,9 @@ Stage-0
     Stage-1
       Map 1 llap
       File Output Operator [FS_2]
-        Select Operator [SEL_1] (rows=5 width=99)
+        Select Operator [SEL_1] (rows=6 width=114)
           Output:["_col0","_col1","_col2"]
-          TableScan [TS_0] (rows=5 width=99)
+          TableScan [TS_0] (rows=6 width=114)
             default@table_add_int_string_permute_select,table_add_int_string_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: select insert_num,a,b from table_add_int_string_permute_select
@@ -358,9 +358,9 @@ Stage-0
     Stage-1
       Map 1 llap
       File Output Operator [FS_2]
-        Select Operator [SEL_1] (rows=5 width=422)
+        Select Operator [SEL_1] (rows=6 width=370)
           Output:["_col0","_col1","_col2","_col3","_col4"]
-          TableScan [TS_0] (rows=5 width=422)
+          TableScan [TS_0] (rows=6 width=370)
             default@table_change_string_group_double,table_change_string_group_double,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double
@@ -602,9 +602,9 @@ Stage-0
     Stage-1
       Map 1 llap
       File Output Operator [FS_2]
-        Select Operator [SEL_1] (rows=5 width=164)
+        Select Operator [SEL_1] (rows=6 width=479)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21"]
-          TableScan [TS_0] (rows=5 width=164)
+          TableScan [TS_0] (rows=6 width=479)
             default@table_change_numeric_group_string_group_multi_ints_string_group,table_change_numeric_group_string_group_multi_ints_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","c16","c17","c18","c19","c20","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group
@@ -757,9 +757,9 @@ Stage-0
     Stage-1
       Map 1 llap
       File Output Operator [FS_2]
-        Select Operator [SEL_1] (rows=5 width=588)
+        Select Operator [SEL_1] (rows=6 width=752)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16"]
-          TableScan [TS_0] (rows=5 width=588)
+          TableScan [TS_0] (rows=6 width=752)
             default@table_change_numeric_group_string_group_floating_string_group,table_change_numeric_group_string_group_floating_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group

http://git-wip-us.apache.org/repos/asf/hive/blob/5468207e/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out
index 87dd01c..ecc4ee6 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out
@@ -70,9 +70,9 @@ Stage-0
     Stage-1
       Map 1 vectorized, llap
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=5 width=99)
+        Select Operator [SEL_3] (rows=6 width=99)
           Output:["_col0","_col1","_col2"]
-          TableScan [TS_0] (rows=5 width=99)
+          TableScan [TS_0] (rows=6 width=99)
             default@table_add_int_permute_select,table_add_int_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: select insert_num,a,b from table_add_int_permute_select
@@ -183,9 +183,9 @@ Stage-0
     Stage-1
       Map 1 vectorized, llap
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=5 width=99)
+        Select Operator [SEL_3] (rows=6 width=114)
           Output:["_col0","_col1","_col2"]
-          TableScan [TS_0] (rows=5 width=99)
+          TableScan [TS_0] (rows=6 width=114)
             default@table_add_int_string_permute_select,table_add_int_string_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"]
 
 PREHOOK: query: select insert_num,a,b from table_add_int_string_permute_select
@@ -358,9 +358,9 @@ Stage-0
     Stage-1
       Map 1 vectorized, llap
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=5 width=422)
+        Select Operator [SEL_3] (rows=6 width=370)
           Output:["_col0","_col1","_col2","_col3","_col4"]
-          TableScan [TS_0] (rows=5 width=422)
+          TableScan [TS_0] (rows=6 width=370)
             default@table_change_string_group_double,table_change_string_group_double,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double
@@ -602,9 +602,9 @@ Stage-0
     Stage-1
       Map 1 vectorized, llap
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=5 width=164)
+        Select Operator [SEL_3] (rows=6 width=479)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21"]
-          TableScan [TS_0] (rows=5 width=164)
+          TableScan [TS_0] (rows=6 width=479)
             default@table_change_numeric_group_string_group_multi_ints_string_group,table_change_numeric_group_string_group_multi_ints_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","c16","c17","c18","c19","c20","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group
@@ -757,9 +757,9 @@ Stage-0
     Stage-1
       Map 1 vectorized, llap
       File Output Operator [FS_4]
-        Select Operator [SEL_3] (rows=5 width=588)
+        Select Operator [SEL_3] (rows=6 width=752)
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16"]
-          TableScan [TS_0] (rows=5 width=588)
+          TableScan [TS_0] (rows=6 width=752)
             default@table_change_numeric_group_string_group_floating_string_group,table_change_numeric_group_string_group_floating_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","b"]
 
 PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group