Posted to commits@hive.apache.org by px...@apache.org on 2017/04/04 21:40:16 UTC

[2/5] hive git commit: HIVE-16293: Column pruner should continue to work when SEL has more than 1 child (Pengcheng Xiong, reviewed by Ashutosh Chauhan)
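Context for the .q.out changes below: after this fix the column pruner leaves the cast-carrying Select Operator in place when a SEL has more than one child, so the implicit type conversions now surface as UDFToString(...)/UDFToInteger(...) expressions in a dedicated Select Operator, and the affected target columns switch from SIMPLE to EXPRESSION lineage. A minimal HiveQL sketch of the kind of statement involved, reconstructed from the lineage lines in this diff rather than copied from the actual .q tests (the real tests add partition filters and mapjoin hints):

    -- target column types inferred from the plan output below (key is STRING,
    -- while the source key is INT), so the insert forces an implicit cast
    create table bucketmapjoin_tmp_result (key string, value1 string, value2 string);

    insert overwrite table bucketmapjoin_tmp_result
    select a.key, a.value, b.value          -- a.key is int, target key is string
    from srcbucket_mapjoin_part_2 a
    join srcbucket_mapjoin_part b on a.key = b.key;

The int-to-string conversion is what now appears as UDFToString(_col0) in the Select Operator and flips the key column's lineage from SIMPLE to EXPRESSION; the groupby*.q.out hunks further down show the same effect for bigint/double aggregate results being cast to string.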

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out
index 2598d34..6ac97a1 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out
@@ -300,7 +300,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col4
                 Statistics: Num rows: 60 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: int), _col1 (type: string), _col4 (type: string)
+                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col4 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 60 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -388,7 +388,7 @@ POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08
 POSTHOOK: Input: default@srcbucket_mapjoin_part_2
 POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08
 POSTHOOK: Output: default@bucketmapjoin_tmp_result
-POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key EXPRESSION [(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin_part_2)a.FieldSchema(name:value, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part)b.FieldSchema(name:value, type:string, comment:null), ]
 PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
@@ -433,7 +433,7 @@ POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08
 POSTHOOK: Input: default@srcbucket_mapjoin_part_2
 POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08
 POSTHOOK: Output: default@bucketmapjoin_tmp_result
-POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key EXPRESSION [(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin_part_2)a.FieldSchema(name:value, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part)b.FieldSchema(name:value, type:string, comment:null), ]
 PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
@@ -661,7 +661,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col4
                 Statistics: Num rows: 60 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: int), _col1 (type: string), _col4 (type: string)
+                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col4 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 60 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -749,7 +749,7 @@ POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08
 POSTHOOK: Input: default@srcbucket_mapjoin_part_2
 POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08
 POSTHOOK: Output: default@bucketmapjoin_tmp_result
-POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key EXPRESSION [(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin_part_2)a.FieldSchema(name:value, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part)b.FieldSchema(name:value, type:string, comment:null), ]
 PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
@@ -794,7 +794,7 @@ POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08
 POSTHOOK: Input: default@srcbucket_mapjoin_part_2
 POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08
 POSTHOOK: Output: default@bucketmapjoin_tmp_result
-POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key EXPRESSION [(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin_part_2)a.FieldSchema(name:value, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part)b.FieldSchema(name:value, type:string, comment:null), ]
 PREHOOK: query: select count(1) from bucketmapjoin_tmp_result

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/bucketmapjoin4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin4.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin4.q.out
index fb004e1..c1ea0ab 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin4.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin4.q.out
@@ -296,7 +296,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col3
                 Statistics: Num rows: 28 Data size: 3025 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: int), _col1 (type: string), _col3 (type: string)
+                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 28 Data size: 3025 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -378,7 +378,7 @@ on a.key=b.key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcbucket_mapjoin
 POSTHOOK: Output: default@bucketmapjoin_tmp_result
-POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key EXPRESSION [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
 PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
@@ -417,7 +417,7 @@ on a.key=b.key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcbucket_mapjoin
 POSTHOOK: Output: default@bucketmapjoin_tmp_result
-POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key EXPRESSION [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
 PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
@@ -641,7 +641,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col3
                 Statistics: Num rows: 28 Data size: 3025 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: int), _col1 (type: string), _col3 (type: string)
+                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 28 Data size: 3025 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -723,7 +723,7 @@ on a.key=b.key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcbucket_mapjoin
 POSTHOOK: Output: default@bucketmapjoin_tmp_result
-POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key EXPRESSION [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
 PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
@@ -762,7 +762,7 @@ on a.key=b.key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcbucket_mapjoin
 POSTHOOK: Output: default@bucketmapjoin_tmp_result
-POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key EXPRESSION [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ]
 PREHOOK: query: select count(1) from bucketmapjoin_tmp_result

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/bucketmapjoin5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin5.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin5.q.out
index 1768e72..fc95d13 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin5.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin5.q.out
@@ -286,7 +286,7 @@ STAGE PLANS:
                       Statistics: Num rows: 121 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
                       BucketMapJoin: true
                       Select Operator
-                        expressions: _col0 (type: int), _col1 (type: string), _col6 (type: string)
+                        expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col6 (type: string)
                         outputColumnNames: _col0, _col1, _col2
                         Statistics: Num rows: 121 Data size: 12786 Basic stats: COMPLETE Column stats: NONE
                         File Output Operator
@@ -483,7 +483,7 @@ POSTHOOK: Input: default@srcbucket_mapjoin_part
 POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08
 POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-09
 POSTHOOK: Output: default@bucketmapjoin_tmp_result
-POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key EXPRESSION [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part)b.FieldSchema(name:value, type:string, comment:null), ]
 PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
@@ -528,7 +528,7 @@ POSTHOOK: Input: default@srcbucket_mapjoin_part
 POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08
 POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-09
 POSTHOOK: Output: default@bucketmapjoin_tmp_result
-POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key EXPRESSION [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part)b.FieldSchema(name:value, type:string, comment:null), ]
 PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
@@ -692,7 +692,7 @@ STAGE PLANS:
                       Statistics: Num rows: 63 Data size: 6736 Basic stats: COMPLETE Column stats: NONE
                       BucketMapJoin: true
                       Select Operator
-                        expressions: _col0 (type: int), _col1 (type: string), _col6 (type: string)
+                        expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col6 (type: string)
                         outputColumnNames: _col0, _col1, _col2
                         Statistics: Num rows: 63 Data size: 6736 Basic stats: COMPLETE Column stats: NONE
                         File Output Operator
@@ -889,7 +889,7 @@ POSTHOOK: Input: default@srcbucket_mapjoin_part_2
 POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08
 POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-09
 POSTHOOK: Output: default@bucketmapjoin_tmp_result
-POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key EXPRESSION [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part_2)b.FieldSchema(name:value, type:string, comment:null), ]
 PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
@@ -934,7 +934,7 @@ POSTHOOK: Input: default@srcbucket_mapjoin_part_2
 POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08
 POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-09
 POSTHOOK: Output: default@bucketmapjoin_tmp_result
-POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: bucketmapjoin_tmp_result.key EXPRESSION [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part_2)b.FieldSchema(name:value, type:string, comment:null), ]
 PREHOOK: query: select count(1) from bucketmapjoin_tmp_result

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative.q.out
index b2127f3..f4537ff 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative.q.out
@@ -183,7 +183,7 @@ STAGE PLANS:
                       Position of Big Table: 0
                       Statistics: Num rows: 44 Data size: 4620 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
-                        expressions: _col0 (type: int), _col1 (type: string), _col6 (type: string)
+                        expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col6 (type: string)
                         outputColumnNames: _col0, _col1, _col2
                         Statistics: Num rows: 44 Data size: 4620 Basic stats: COMPLETE Column stats: NONE
                         File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative2.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative2.q.out
index 3838421..50a726e 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative2.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative2.q.out
@@ -247,7 +247,7 @@ STAGE PLANS:
                       Statistics: Num rows: 63 Data size: 6736 Basic stats: COMPLETE Column stats: NONE
                       BucketMapJoin: true
                       Select Operator
-                        expressions: _col0 (type: int), _col1 (type: string), _col6 (type: string)
+                        expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col6 (type: string)
                         outputColumnNames: _col0, _col1, _col2
                         Statistics: Num rows: 63 Data size: 6736 Basic stats: COMPLETE Column stats: NONE
                         File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out b/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
index 126838e..72e581e 100644
--- a/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
+++ b/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
@@ -217,7 +217,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: double)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -237,7 +237,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: double)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/filter_join_breaktask2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/filter_join_breaktask2.q.out b/ql/src/test/results/clientpositive/spark/filter_join_breaktask2.q.out
index 6a2396e..26ce0ed 100644
--- a/ql/src/test/results/clientpositive/spark/filter_join_breaktask2.q.out
+++ b/ql/src/test/results/clientpositive/spark/filter_join_breaktask2.q.out
@@ -44,9 +44,9 @@ POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c1 SIMPLE []
 POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c2 SIMPLE []
 POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c3 SIMPLE []
 POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c4 SIMPLE []
-POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c5 SIMPLE []
-POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c6 SIMPLE []
-POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c7 SIMPLE []
+POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c5 EXPRESSION []
+POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c6 EXPRESSION []
+POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c7 EXPRESSION []
 PREHOOK: query: insert overwrite table T2 partition(ds='2010-04-17') select '5','name', NULL, '2', 'kavin',NULL, '9', 'c', '8', '0', '0', '7', '1','2', '0', '3','2', NULL, '1', NULL, '3','2','0','0','5','10' from src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -64,9 +64,9 @@ POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c13 SIMPLE []
 POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c14 SIMPLE []
 POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c15 SIMPLE []
 POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c16 SIMPLE []
-POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c17 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c17 EXPRESSION []
 POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c18 SIMPLE []
-POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c19 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c19 EXPRESSION []
 POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c2 SIMPLE []
 POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c20 SIMPLE []
 POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c21 SIMPLE []
@@ -74,9 +74,9 @@ POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c22 SIMPLE []
 POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c23 SIMPLE []
 POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c24 SIMPLE []
 POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c25 SIMPLE []
-POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c3 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c3 EXPRESSION []
 POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c4 SIMPLE []
-POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c5 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c5 EXPRESSION []
 POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c6 SIMPLE []
 POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c7 SIMPLE []
 POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c8 SIMPLE []
@@ -118,18 +118,18 @@ POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c2 SIMPLE []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c20 SIMPLE []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c21 SIMPLE []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c22 SIMPLE []
-POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c23 SIMPLE []
-POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c24 SIMPLE []
-POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c25 SIMPLE []
-POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c26 SIMPLE []
-POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c27 SIMPLE []
-POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c28 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c23 EXPRESSION []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c24 EXPRESSION []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c25 EXPRESSION []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c26 EXPRESSION []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c27 EXPRESSION []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c28 EXPRESSION []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c29 SIMPLE []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c3 SIMPLE []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c30 SIMPLE []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c31 SIMPLE []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c32 SIMPLE []
-POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c33 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c33 EXPRESSION []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c34 SIMPLE []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c35 SIMPLE []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c36 SIMPLE []
@@ -157,18 +157,18 @@ POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c55 SIMPLE []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c56 SIMPLE []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c57 SIMPLE []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c58 SIMPLE []
-POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c59 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c59 EXPRESSION []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c6 SIMPLE []
-POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c60 SIMPLE []
-POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c61 SIMPLE []
-POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c62 SIMPLE []
-POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c63 SIMPLE []
-POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c64 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c60 EXPRESSION []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c61 EXPRESSION []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c62 EXPRESSION []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c63 EXPRESSION []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c64 EXPRESSION []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c65 SIMPLE []
-POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c66 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c66 EXPRESSION []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c67 EXPRESSION []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c68 SIMPLE []
-POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c69 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c69 EXPRESSION []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c7 SIMPLE []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c70 EXPRESSION []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c71 EXPRESSION []
@@ -181,7 +181,7 @@ POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c77 SIMPLE []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c78 SIMPLE []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c79 SIMPLE []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c8 SIMPLE []
-POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c80 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c80 EXPRESSION []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c81 EXPRESSION []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c82 EXPRESSION []
 POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c83 EXPRESSION []

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/groupby5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby5.q.out b/ql/src/test/results/clientpositive/spark/groupby5.q.out
index 41b46f8..d292f74 100644
--- a/ql/src/test/results/clientpositive/spark/groupby5.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby5.q.out
@@ -69,7 +69,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: double)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/groupby5_noskew.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby5_noskew.q.out b/ql/src/test/results/clientpositive/spark/groupby5_noskew.q.out
index ccfd32a..300ccb6 100644
--- a/ql/src/test/results/clientpositive/spark/groupby5_noskew.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby5_noskew.q.out
@@ -54,7 +54,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: double)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/groupby7_map.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby7_map.q.out b/ql/src/test/results/clientpositive/spark/groupby7_map.q.out
index 88b6304..31daab8 100644
--- a/ql/src/test/results/clientpositive/spark/groupby7_map.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby7_map.q.out
@@ -69,7 +69,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: double)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -89,7 +89,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: double)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/groupby7_map_multi_single_reducer.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby7_map_multi_single_reducer.q.out b/ql/src/test/results/clientpositive/spark/groupby7_map_multi_single_reducer.q.out
index 456dda1..625a737 100644
--- a/ql/src/test/results/clientpositive/spark/groupby7_map_multi_single_reducer.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby7_map_multi_single_reducer.q.out
@@ -64,7 +64,7 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: double)
+                    expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
@@ -82,7 +82,7 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: double)
+                    expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/groupby7_map_skew.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby7_map_skew.q.out b/ql/src/test/results/clientpositive/spark/groupby7_map_skew.q.out
index 7bf7bbd..4fbfd30 100644
--- a/ql/src/test/results/clientpositive/spark/groupby7_map_skew.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby7_map_skew.q.out
@@ -84,7 +84,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: double)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -104,7 +104,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: double)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/groupby7_noskew.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby7_noskew.q.out b/ql/src/test/results/clientpositive/spark/groupby7_noskew.q.out
index 89fea83..a26247a 100644
--- a/ql/src/test/results/clientpositive/spark/groupby7_noskew.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby7_noskew.q.out
@@ -63,7 +63,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: double)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -83,7 +83,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: double)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/groupby7_noskew_multi_single_reducer.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby7_noskew_multi_single_reducer.q.out b/ql/src/test/results/clientpositive/spark/groupby7_noskew_multi_single_reducer.q.out
index 84948ac..2dce301 100644
--- a/ql/src/test/results/clientpositive/spark/groupby7_noskew_multi_single_reducer.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby7_noskew_multi_single_reducer.q.out
@@ -65,7 +65,7 @@ STAGE PLANS:
                   Number of rows: 10
                   Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: double)
+                    expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
@@ -86,7 +86,7 @@ STAGE PLANS:
                   Number of rows: 10
                   Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: double)
+                    expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/groupby8.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby8.q.out b/ql/src/test/results/clientpositive/spark/groupby8.q.out
index 3158bc7..f7f3279 100644
--- a/ql/src/test/results/clientpositive/spark/groupby8.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby8.q.out
@@ -77,7 +77,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: bigint)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -97,7 +97,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: bigint)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -850,7 +850,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: bigint)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -870,7 +870,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: bigint)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/groupby8_map.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby8_map.q.out b/ql/src/test/results/clientpositive/spark/groupby8_map.q.out
index 662f505..288ca3f 100644
--- a/ql/src/test/results/clientpositive/spark/groupby8_map.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby8_map.q.out
@@ -63,7 +63,7 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: bigint)
+                    expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
@@ -81,7 +81,7 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: bigint)
+                    expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/groupby8_map_skew.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby8_map_skew.q.out b/ql/src/test/results/clientpositive/spark/groupby8_map_skew.q.out
index ac6a87b..9e76fd5 100644
--- a/ql/src/test/results/clientpositive/spark/groupby8_map_skew.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby8_map_skew.q.out
@@ -83,7 +83,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: bigint)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -103,7 +103,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: bigint)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/groupby8_noskew.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby8_noskew.q.out b/ql/src/test/results/clientpositive/spark/groupby8_noskew.q.out
index 662f505..288ca3f 100644
--- a/ql/src/test/results/clientpositive/spark/groupby8_noskew.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby8_noskew.q.out
@@ -63,7 +63,7 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: bigint)
+                    expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
@@ -81,7 +81,7 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: bigint)
+                    expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/groupby9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby9.q.out b/ql/src/test/results/clientpositive/spark/groupby9.q.out
index 96f812f..7095f21 100644
--- a/ql/src/test/results/clientpositive/spark/groupby9.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby9.q.out
@@ -88,7 +88,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: bigint)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -108,7 +108,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), _col2 (type: bigint)
+                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), UDFToString(_col2) (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -873,7 +873,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: bigint)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -893,7 +893,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col1) (type: int), _col0 (type: string), _col2 (type: bigint)
+                  expressions: UDFToInteger(_col1) (type: int), _col0 (type: string), UDFToString(_col2) (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -1658,7 +1658,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: bigint)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -1678,7 +1678,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), _col2 (type: bigint)
+                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), UDFToString(_col2) (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -2445,7 +2445,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: bigint)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -2465,7 +2465,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), _col2 (type: bigint)
+                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), UDFToString(_col2) (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -3230,7 +3230,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: bigint)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -3250,7 +3250,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col1) (type: int), _col0 (type: string), _col2 (type: bigint)
+                  expressions: UDFToInteger(_col1) (type: int), _col0 (type: string), UDFToString(_col2) (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/groupby_position.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby_position.q.out b/ql/src/test/results/clientpositive/spark/groupby_position.q.out
index 24be0dc..e68c435 100644
--- a/ql/src/test/results/clientpositive/spark/groupby_position.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby_position.q.out
@@ -86,7 +86,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: bigint)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -106,7 +106,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), _col2 (type: bigint)
+                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), UDFToString(_col2) (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -281,7 +281,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: bigint)
+                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -301,7 +301,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col1) (type: int), _col0 (type: string), _col2 (type: bigint)
+                  expressions: UDFToInteger(_col1) (type: int), _col0 (type: string), UDFToString(_col2) (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/infer_bucket_sort_map_operators.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/infer_bucket_sort_map_operators.q.out b/ql/src/test/results/clientpositive/spark/infer_bucket_sort_map_operators.q.out
index 3152663..356f625 100644
--- a/ql/src/test/results/clientpositive/spark/infer_bucket_sort_map_operators.q.out
+++ b/ql/src/test/results/clientpositive/spark/infer_bucket_sort_map_operators.q.out
@@ -77,14 +77,18 @@ STAGE PLANS:
                       mode: final
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-                      File Output Operator
-                        compressed: false
+                      Select Operator
+                        expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                        outputColumnNames: _col0, _col1
                         Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-                        table:
-                            input format: org.apache.hadoop.mapred.TextInputFormat
-                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                            name: default.test_table_out
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              name: default.test_table_out
 
   Stage: Stage-0
     Move Operator
@@ -232,14 +236,18 @@ STAGE PLANS:
                   1 UDFToDouble(_col0) (type: double)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
+                Select Operator
+                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  outputColumnNames: _col0, _col1
                   Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.test_table_out
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.test_table_out
 
   Stage: Stage-0
     Move Operator
@@ -488,14 +496,18 @@ STAGE PLANS:
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
+                Select Operator
+                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  outputColumnNames: _col0, _col1
                   Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.test_table_out
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.test_table_out
 
   Stage: Stage-0
     Move Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/join38.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join38.q.out b/ql/src/test/results/clientpositive/spark/join38.q.out
index 244928a..7e4b790 100644
--- a/ql/src/test/results/clientpositive/spark/join38.q.out
+++ b/ql/src/test/results/clientpositive/spark/join38.q.out
@@ -15,17 +15,17 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@tmp
 POSTHOOK: Lineage: tmp.col0 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: tmp.col1 SIMPLE []
-POSTHOOK: Lineage: tmp.col10 SIMPLE []
-POSTHOOK: Lineage: tmp.col11 SIMPLE []
-POSTHOOK: Lineage: tmp.col2 SIMPLE []
-POSTHOOK: Lineage: tmp.col3 SIMPLE []
-POSTHOOK: Lineage: tmp.col4 SIMPLE []
-POSTHOOK: Lineage: tmp.col5 SIMPLE []
-POSTHOOK: Lineage: tmp.col6 SIMPLE []
-POSTHOOK: Lineage: tmp.col7 SIMPLE []
-POSTHOOK: Lineage: tmp.col8 SIMPLE []
-POSTHOOK: Lineage: tmp.col9 SIMPLE []
+POSTHOOK: Lineage: tmp.col1 EXPRESSION []
+POSTHOOK: Lineage: tmp.col10 EXPRESSION []
+POSTHOOK: Lineage: tmp.col11 EXPRESSION []
+POSTHOOK: Lineage: tmp.col2 EXPRESSION []
+POSTHOOK: Lineage: tmp.col3 EXPRESSION []
+POSTHOOK: Lineage: tmp.col4 EXPRESSION []
+POSTHOOK: Lineage: tmp.col5 EXPRESSION []
+POSTHOOK: Lineage: tmp.col6 EXPRESSION []
+POSTHOOK: Lineage: tmp.col7 EXPRESSION []
+POSTHOOK: Lineage: tmp.col8 EXPRESSION []
+POSTHOOK: Lineage: tmp.col9 EXPRESSION []
 PREHOOK: query: select * from tmp
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tmp

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/multi_insert_lateral_view.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/multi_insert_lateral_view.q.out b/ql/src/test/results/clientpositive/spark/multi_insert_lateral_view.q.out
index dd3fa50..4adb969 100644
--- a/ql/src/test/results/clientpositive/spark/multi_insert_lateral_view.q.out
+++ b/ql/src/test/results/clientpositive/spark/multi_insert_lateral_view.q.out
@@ -71,7 +71,7 @@ STAGE PLANS:
                         outputColumnNames: _col0, _col5
                         Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col0 (type: string), _col5 (type: double)
+                          expressions: _col0 (type: string), UDFToString(_col5) (type: string)
                           outputColumnNames: _col0, _col1
                           Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
                           File Output Operator
@@ -93,7 +93,7 @@ STAGE PLANS:
                           outputColumnNames: _col0, _col5
                           Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
                           Select Operator
-                            expressions: _col0 (type: string), _col5 (type: double)
+                            expressions: _col0 (type: string), UDFToString(_col5) (type: string)
                             outputColumnNames: _col0, _col1
                             Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
                             File Output Operator
@@ -114,7 +114,7 @@ STAGE PLANS:
                         outputColumnNames: _col0, _col5
                         Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col0 (type: string), _col5 (type: double)
+                          expressions: _col0 (type: string), UDFToString(_col5) (type: string)
                           outputColumnNames: _col0, _col1
                           Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
                           File Output Operator
@@ -136,7 +136,7 @@ STAGE PLANS:
                           outputColumnNames: _col0, _col5
                           Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
                           Select Operator
-                            expressions: _col0 (type: string), _col5 (type: double)
+                            expressions: _col0 (type: string), UDFToString(_col5) (type: string)
                             outputColumnNames: _col0, _col1
                             Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
                             File Output Operator
@@ -377,14 +377,18 @@ STAGE PLANS:
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
+                Select Operator
+                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.src_lv1
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.src_lv1
         Reducer 3 
             Reduce Operator Tree:
               Group By Operator
@@ -393,14 +397,18 @@ STAGE PLANS:
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
+                Select Operator
+                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.src_lv2
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.src_lv2
 
   Stage: Stage-0
     Move Operator
@@ -581,14 +589,18 @@ STAGE PLANS:
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
+                Select Operator
+                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.src_lv1
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.src_lv1
         Reducer 3 
             Reduce Operator Tree:
               Forward
@@ -602,14 +614,18 @@ STAGE PLANS:
                     mode: complete
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
-                    File Output Operator
-                      compressed: false
+                    Select Operator
+                      expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                      outputColumnNames: _col0, _col1
                       Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
-                      table:
-                          input format: org.apache.hadoop.mapred.TextInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                          name: default.src_lv2
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: default.src_lv2
                 Filter Operator
                   predicate: (KEY._col0 < 200) (type: boolean)
                   Statistics: Num rows: 2 Data size: 20 Basic stats: COMPLETE Column stats: NONE
@@ -619,14 +635,18 @@ STAGE PLANS:
                     mode: complete
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
-                    File Output Operator
-                      compressed: false
+                    Select Operator
+                      expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                      outputColumnNames: _col0, _col1
                       Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
-                      table:
-                          input format: org.apache.hadoop.mapred.TextInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                          name: default.src_lv3
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: default.src_lv3
 
   Stage: Stage-0
     Move Operator
@@ -901,14 +921,18 @@ STAGE PLANS:
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
+                Select Operator
+                  expressions: UDFToString(_col0) (type: string), UDFToString(_col1) (type: string)
+                  outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.src_lv1
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.src_lv1
         Reducer 3 
             Reduce Operator Tree:
               Group By Operator
@@ -917,14 +941,18 @@ STAGE PLANS:
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
+                Select Operator
+                  expressions: UDFToString(_col0) (type: string), UDFToString(_col1) (type: string)
+                  outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.src_lv2
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.src_lv2
         Reducer 4 
             Reduce Operator Tree:
               Group By Operator
@@ -933,14 +961,18 @@ STAGE PLANS:
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 5 Data size: 52 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
+                Select Operator
+                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  outputColumnNames: _col0, _col1
                   Statistics: Num rows: 5 Data size: 52 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.src_lv3
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 5 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.src_lv3
 
   Stage: Stage-0
     Move Operator
@@ -1234,14 +1266,18 @@ STAGE PLANS:
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
+                Select Operator
+                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.src_lv1
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.src_lv1
         Reducer 3 
             Reduce Operator Tree:
               Group By Operator
@@ -1250,14 +1286,18 @@ STAGE PLANS:
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
+                Select Operator
+                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.src_lv2
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.src_lv2
         Reducer 4 
             Reduce Operator Tree:
               Forward
@@ -1271,14 +1311,18 @@ STAGE PLANS:
                     mode: complete
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
-                    File Output Operator
-                      compressed: false
+                    Select Operator
+                      expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                      outputColumnNames: _col0, _col1
                       Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
-                      table:
-                          input format: org.apache.hadoop.mapred.TextInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                          name: default.src_lv3
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: default.src_lv3
                 Filter Operator
                   predicate: (KEY._col1:0._col0 < 200) (type: boolean)
                   Statistics: Num rows: 2 Data size: 20 Basic stats: COMPLETE Column stats: NONE
@@ -1288,14 +1332,18 @@ STAGE PLANS:
                     mode: complete
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
-                    File Output Operator
-                      compressed: false
+                    Select Operator
+                      expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                      outputColumnNames: _col0, _col1
                       Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
-                      table:
-                          input format: org.apache.hadoop.mapred.TextInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                          name: default.src_lv4
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: default.src_lv4
 
   Stage: Stage-0
     Move Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/multi_insert_mixed.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/multi_insert_mixed.q.out b/ql/src/test/results/clientpositive/spark/multi_insert_mixed.q.out
index 9d90d93..2b28d53 100644
--- a/ql/src/test/results/clientpositive/spark/multi_insert_mixed.q.out
+++ b/ql/src/test/results/clientpositive/spark/multi_insert_mixed.q.out
@@ -126,7 +126,7 @@ STAGE PLANS:
         Reducer 3 
             Reduce Operator Tree:
               Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: bigint)
+                expressions: KEY.reducesinkkey0 (type: string), UDFToString(VALUE._col0) (type: string)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
@@ -153,7 +153,7 @@ STAGE PLANS:
         Reducer 5 
             Reduce Operator Tree:
               Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: bigint)
+                expressions: KEY.reducesinkkey0 (type: string), UDFToString(VALUE._col0) (type: string)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/smb_mapjoin_20.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/smb_mapjoin_20.q.out b/ql/src/test/results/clientpositive/spark/smb_mapjoin_20.q.out
index a5c1d78..f35a33d 100644
--- a/ql/src/test/results/clientpositive/spark/smb_mapjoin_20.q.out
+++ b/ql/src/test/results/clientpositive/spark/smb_mapjoin_20.q.out
@@ -68,7 +68,7 @@ STAGE PLANS:
         Reducer 2 
             Reduce Operator Tree:
               Select Operator
-                expressions: VALUE._col0 (type: int), VALUE._col1 (type: string), VALUE._col1 (type: string)
+                expressions: UDFToString(VALUE._col0) (type: string), VALUE._col1 (type: string), VALUE._col1 (type: string)
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
@@ -107,7 +107,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test_table1
 POSTHOOK: Input: default@test_table1@ds=1
 POSTHOOK: Output: default@test_table2@ds=1
-POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ]
 POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value1 SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ]
 POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value2 SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ]
 PREHOOK: query: select count(*) from test_table2 where ds = '1'
@@ -308,7 +308,7 @@ STAGE PLANS:
         Reducer 2 
             Reduce Operator Tree:
               Select Operator
-                expressions: VALUE._col0 (type: int), VALUE._col1 (type: string), VALUE._col1 (type: string)
+                expressions: UDFToString(VALUE._col0) (type: string), VALUE._col1 (type: string), VALUE._col1 (type: string)
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/23ac04d3/ql/src/test/results/clientpositive/spark/table_access_keys_stats.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/table_access_keys_stats.q.out b/ql/src/test/results/clientpositive/spark/table_access_keys_stats.q.out
index 90d4cbb..a4ceff6 100644
--- a/ql/src/test/results/clientpositive/spark/table_access_keys_stats.q.out
+++ b/ql/src/test/results/clientpositive/spark/table_access_keys_stats.q.out
@@ -252,7 +252,7 @@ Operator:GBY_2
 Table:default@t1
 Keys:key
 
-Operator:GBY_8
+Operator:GBY_9
 Table:default@t1
 Keys:key