Posted to commits@hive.apache.org by se...@apache.org on 2018/07/24 19:40:21 UTC

[01/11] hive git commit: HIVE-19891 : inserting into external tables with custom partition directories may cause data loss (Sergey Shelukhin, reviewed by Ashutosh Chauhan)

Repository: hive
Updated Branches:
  refs/heads/master-txnstats 9f9ae73cc -> 5e7a8b59c


HIVE-19891 : inserting into external tables with custom partition directories may cause data loss (Sergey Shelukhin, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4e9562f1
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4e9562f1
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4e9562f1

Branch: refs/heads/master-txnstats
Commit: 4e9562f1e7a0baeae6b5e0ead8f54a43f3196f5b
Parents: 5e7aa09
Author: sergey <se...@apache.org>
Authored: Mon Jul 23 15:55:55 2018 -0700
Committer: sergey <se...@apache.org>
Committed: Mon Jul 23 15:56:25 2018 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hive/ql/exec/MoveTask.java    |   2 +-
 .../apache/hadoop/hive/ql/metadata/Hive.java    |  22 ++-
 .../hive/ql/parse/DDLSemanticAnalyzer.java      |   1 +
 .../hadoop/hive/ql/plan/LoadTableDesc.java      |  10 +-
 .../queries/clientpositive/external_insert.q    |  14 ++
 .../clientpositive/external_insert.q.out        | 158 +++++++++++++++++++
 6 files changed, 198 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/4e9562f1/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
index bf7749d..322207d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
@@ -482,7 +482,7 @@ public class MoveTask extends Task<MoveWork> implements Serializable {
 
     db.loadPartition(tbd.getSourcePath(), db.getTable(tbd.getTable().getTableName()),
         tbd.getPartitionSpec(), tbd.getLoadFileType(), tbd.getInheritTableSpecs(),
-        isSkewedStoredAsDirs(tbd), work.isSrcLocal(),
+        tbd.getInheritLocation(), isSkewedStoredAsDirs(tbd), work.isSrcLocal(),
          work.getLoadTableWork().getWriteType() != AcidUtils.Operation.NOT_ACID &&
             !tbd.isMmTable(),
          hasFollowingStatsTask(),

http://git-wip-us.apache.org/repos/asf/hive/blob/4e9562f1/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 1fe1fb6..fdb4fa2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -1717,7 +1717,8 @@ public class Hive {
    * @return Partition object being loaded with data
    */
   public Partition loadPartition(Path loadPath, Table tbl, Map<String, String> partSpec,
-      LoadFileType loadFileType, boolean inheritTableSpecs, boolean isSkewedStoreAsSubdir,
+      LoadFileType loadFileType, boolean inheritTableSpecs, boolean inheritLocation,
+      boolean isSkewedStoreAsSubdir,
       boolean isSrcLocal, boolean isAcidIUDoperation, boolean hasFollowingStatsTask, Long writeId,
       int stmtId, boolean isInsertOverwrite) throws HiveException {
     Path tblDataLocationPath =  tbl.getDataLocation();
@@ -1741,10 +1742,8 @@ public class Hive {
       Path oldPartPath = (oldPart != null) ? oldPart.getDataLocation() : null;
       Path newPartPath = null;
 
-      if (inheritTableSpecs) {
-        Path partPath = new Path(tbl.getDataLocation(), Warehouse.makePartPath(partSpec));
-        newPartPath = new Path(tblDataLocationPath.toUri().getScheme(),
-            tblDataLocationPath.toUri().getAuthority(), partPath.toUri().getPath());
+      if (inheritLocation) {
+        newPartPath = genPartPathFromTable(tbl, partSpec, tblDataLocationPath);
 
         if(oldPart != null) {
           /*
@@ -1761,7 +1760,8 @@ public class Hive {
           }
         }
       } else {
-        newPartPath = oldPartPath;
+        newPartPath = oldPartPath == null
+          ? genPartPathFromTable(tbl, partSpec, tblDataLocationPath) : oldPartPath;
       }
       List<Path> newFiles = Collections.synchronizedList(new ArrayList<Path>());
 
@@ -1940,6 +1940,14 @@ public class Hive {
     }
   }
 
+
+  private static Path genPartPathFromTable(Table tbl, Map<String, String> partSpec,
+      Path tblDataLocationPath) throws MetaException {
+    Path partPath = new Path(tbl.getDataLocation(), Warehouse.makePartPath(partSpec));
+    return new Path(tblDataLocationPath.toUri().getScheme(),
+        tblDataLocationPath.toUri().getAuthority(), partPath.toUri().getPath());
+  }
+
   /**
    * Load Data commands for fullAcid tables write to base_x (if there is overwrite clause) or
    * delta_x_x directory - same as any other Acid write.  This method modifies the destPath to add
@@ -2262,7 +2270,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
 
               // load the partition
               Partition newPartition = loadPartition(partPath, tbl, fullPartSpec, loadFileType,
-                  true, numLB > 0, false, isAcid, hasFollowingStatsTask, writeId, stmtId,
+                  true, false, numLB > 0, false, isAcid, hasFollowingStatsTask, writeId, stmtId,
                   isInsertOverwrite);
               partitionsMap.put(fullPartSpec, newPartition);
 

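The hunks above are easier to follow when the new destination-path logic is read as one piece. A rough sketch (not verbatim; variable and method names as in the patch) of how loadPartition now picks newPartPath:

    Path newPartPath;
    if (inheritLocation) {
      // Explicitly asked to inherit: derive the partition path from the table's
      // data location, as the old inheritTableSpecs branch did.
      newPartPath = genPartPathFromTable(tbl, partSpec, tblDataLocationPath);
    } else {
      // Otherwise keep the partition's existing (possibly custom) location and
      // only fall back to a table-derived path when the partition is new.
      newPartPath = (oldPartPath == null)
          ? genPartPathFromTable(tbl, partSpec, tblDataLocationPath)
          : oldPartPath;
    }

In short, an insert no longer redirects an existing partition with a custom location back under the table directory unless inheritance is explicitly requested.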
http://git-wip-us.apache.org/repos/asf/hive/blob/4e9562f1/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 9373df6..2007e13 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -2094,6 +2094,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
       LoadTableDesc ltd = new LoadTableDesc(queryTmpdir, tblDesc,
           partSpec == null ? new HashMap<>() : partSpec);
       ltd.setLbCtx(lbCtx);
+      ltd.setInheritTableSpecs(true);
       Task<MoveWork> moveTsk =
           TaskFactory.get(new MoveWork(null, null, ltd, null, false));
       mergeTask.addDependentTask(moveTsk);

http://git-wip-us.apache.org/repos/asf/hive/blob/4e9562f1/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
index f15b3c3..af2ece4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
@@ -38,6 +38,7 @@ public class LoadTableDesc extends LoadDesc implements Serializable {
   private ListBucketingCtx lbCtx;
   private boolean inheritTableSpecs = true; //For partitions, flag controlling whether the current
                                             //table specs are to be used
+  private boolean inheritLocation = false; // Whether the partition location should be derived from the table location (see Hive.loadPartition).
   private int stmtId;
   private Long currentWriteId;
   private boolean isInsertOverwrite;
@@ -71,6 +72,7 @@ public class LoadTableDesc extends LoadDesc implements Serializable {
     this.dpCtx = o.dpCtx;
     this.lbCtx = o.lbCtx;
     this.inheritTableSpecs = o.inheritTableSpecs;
+    this.inheritLocation = o.inheritLocation;
     this.currentWriteId = o.currentWriteId;
     this.table = o.table;
     this.partitionSpec = o.partitionSpec;
@@ -207,8 +209,14 @@ public class LoadTableDesc extends LoadDesc implements Serializable {
     return inheritTableSpecs;
   }
 
+  public boolean getInheritLocation() {
+    return inheritLocation;
+  }
+
   public void setInheritTableSpecs(boolean inheritTableSpecs) {
-    this.inheritTableSpecs = inheritTableSpecs;
+    // Set inheritLocation if this is set to true explicitly.
+    // TODO: Who actually needs this? Might just be some pointless legacy code.
+    this.inheritTableSpecs = inheritLocation = inheritTableSpecs;
   }
 
   public boolean isInsertOverwrite() {

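Taken together with the Hive.java and DDLSemanticAnalyzer changes above, the two flags interact roughly like this (illustrative sketch only, not code from the patch):

    // Defaults: inheritTableSpecs == true, inheritLocation == false, so loading
    // into an existing partition keeps that partition's possibly custom location.
    LoadTableDesc ltd = new LoadTableDesc(queryTmpdir, tblDesc, partSpec);

    // Callers that invoke the setter explicitly (e.g. the merge path in
    // DDLSemanticAnalyzer above) also turn on inheritLocation, preserving the
    // old behavior of deriving the partition path from the table location.
    ltd.setInheritTableSpecs(true);
    assert ltd.getInheritLocation();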
http://git-wip-us.apache.org/repos/asf/hive/blob/4e9562f1/ql/src/test/queries/clientpositive/external_insert.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/external_insert.q b/ql/src/test/queries/clientpositive/external_insert.q
new file mode 100644
index 0000000..9a62609
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/external_insert.q
@@ -0,0 +1,14 @@
+drop table tbl1;
+
+-- tbl1 is only used to create a directory with data
+CREATE TABLE tbl1 (index int, value int) LOCATION 'file:${system:test.tmp.dir}/external_insert';
+insert into tbl1 VALUES (2, 2);
+
+CREATE external TABLE tbl2 (index int, value int ) PARTITIONED BY ( created_date string );
+ALTER TABLE tbl2 ADD PARTITION(created_date='2018-02-01');
+ALTER TABLE tbl2 PARTITION(created_date='2018-02-01') SET LOCATION 'file:${system:test.tmp.dir}/external_insert';
+select * from tbl2;
+describe formatted tbl2 partition(created_date='2018-02-01');
+insert into tbl2 partition(created_date='2018-02-01') VALUES (1, 1);
+select * from tbl2;
+describe formatted tbl2 partition(created_date='2018-02-01');

http://git-wip-us.apache.org/repos/asf/hive/blob/4e9562f1/ql/src/test/results/clientpositive/external_insert.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/external_insert.q.out b/ql/src/test/results/clientpositive/external_insert.q.out
new file mode 100644
index 0000000..fbec406
--- /dev/null
+++ b/ql/src/test/results/clientpositive/external_insert.q.out
@@ -0,0 +1,158 @@
+PREHOOK: query: drop table tbl1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tbl1
+POSTHOOK: type: DROPTABLE
+#### A masked pattern was here ####
+PREHOOK: type: CREATETABLE
+#### A masked pattern was here ####
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tbl1
+#### A masked pattern was here ####
+POSTHOOK: type: CREATETABLE
+#### A masked pattern was here ####
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tbl1
+PREHOOK: query: insert into tbl1 VALUES (2, 2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tbl1
+POSTHOOK: query: insert into tbl1 VALUES (2, 2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tbl1
+POSTHOOK: Lineage: tbl1.index SCRIPT []
+POSTHOOK: Lineage: tbl1.value SCRIPT []
+PREHOOK: query: CREATE external TABLE tbl2 (index int, value int ) PARTITIONED BY ( created_date string )
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tbl2
+POSTHOOK: query: CREATE external TABLE tbl2 (index int, value int ) PARTITIONED BY ( created_date string )
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tbl2
+PREHOOK: query: ALTER TABLE tbl2 ADD PARTITION(created_date='2018-02-01')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@tbl2
+POSTHOOK: query: ALTER TABLE tbl2 ADD PARTITION(created_date='2018-02-01')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@tbl2
+POSTHOOK: Output: default@tbl2@created_date=2018-02-01
+#### A masked pattern was here ####
+PREHOOK: type: ALTERPARTITION_LOCATION
+PREHOOK: Input: default@tbl2
+PREHOOK: Output: default@tbl2@created_date=2018-02-01
+#### A masked pattern was here ####
+POSTHOOK: type: ALTERPARTITION_LOCATION
+POSTHOOK: Input: default@tbl2
+POSTHOOK: Input: default@tbl2@created_date=2018-02-01
+POSTHOOK: Output: default@tbl2@created_date=2018-02-01
+#### A masked pattern was here ####
+PREHOOK: query: select * from tbl2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tbl2
+PREHOOK: Input: default@tbl2@created_date=2018-02-01
+#### A masked pattern was here ####
+POSTHOOK: query: select * from tbl2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tbl2
+POSTHOOK: Input: default@tbl2@created_date=2018-02-01
+#### A masked pattern was here ####
+2	2	2018-02-01
+PREHOOK: query: describe formatted tbl2 partition(created_date='2018-02-01')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@tbl2
+POSTHOOK: query: describe formatted tbl2 partition(created_date='2018-02-01')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@tbl2
+# col_name            	data_type           	comment             
+index               	int                 	                    
+value               	int                 	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+created_date        	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2018-02-01]        	 
+Database:           	default             	 
+Table:              	tbl2                	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"index\":\"true\",\"value\":\"true\"}}
+#### A masked pattern was here ####
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: insert into tbl2 partition(created_date='2018-02-01') VALUES (1, 1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tbl2@created_date=2018-02-01
+POSTHOOK: query: insert into tbl2 partition(created_date='2018-02-01') VALUES (1, 1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tbl2@created_date=2018-02-01
+POSTHOOK: Lineage: tbl2 PARTITION(created_date=2018-02-01).index SCRIPT []
+POSTHOOK: Lineage: tbl2 PARTITION(created_date=2018-02-01).value SCRIPT []
+PREHOOK: query: select * from tbl2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tbl2
+PREHOOK: Input: default@tbl2@created_date=2018-02-01
+#### A masked pattern was here ####
+POSTHOOK: query: select * from tbl2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tbl2
+POSTHOOK: Input: default@tbl2@created_date=2018-02-01
+#### A masked pattern was here ####
+2	2	2018-02-01
+1	1	2018-02-01
+PREHOOK: query: describe formatted tbl2 partition(created_date='2018-02-01')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@tbl2
+POSTHOOK: query: describe formatted tbl2 partition(created_date='2018-02-01')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@tbl2
+# col_name            	data_type           	comment             
+index               	int                 	                    
+value               	int                 	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+created_date        	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2018-02-01]        	 
+Database:           	default             	 
+Table:              	tbl2                	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"index\":\"true\",\"value\":\"true\"}}
+#### A masked pattern was here ####
+	numFiles            	2                   
+	numRows             	1                   
+	rawDataSize         	3                   
+	totalSize           	8                   
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   


[03/11] hive git commit: HIVE-20164 : Murmur Hash : Make sure CTAS and IAS use correct bucketing version (Deepak Jaiswal, reviewed by Jason Dere)

Posted by se...@apache.org.
HIVE-20164 : Murmur Hash : Make sure CTAS and IAS use correct bucketing version (Deepak Jaiswal, reviewed by Jason Dere)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/87b9f647
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/87b9f647
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/87b9f647

Branch: refs/heads/master-txnstats
Commit: 87b9f647b0478b587019e8c94454d33dae970e19
Parents: ed4fa73
Author: Deepak Jaiswal <dj...@apache.org>
Authored: Mon Jul 23 20:38:40 2018 -0700
Committer: Deepak Jaiswal <dj...@apache.org>
Committed: Mon Jul 23 20:38:40 2018 -0700

----------------------------------------------------------------------
 .../test/resources/testconfiguration.properties |   1 +
 .../hadoop/hive/ql/parse/TezCompiler.java       |  42 ++
 .../apache/hadoop/hive/ql/plan/TableDesc.java   |   3 +
 .../clientpositive/murmur_hash_migration.q      |  61 ++
 .../llap/murmur_hash_migration.q.out            | 618 +++++++++++++++++++
 5 files changed, 725 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/87b9f647/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 654185d..b5ae390 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -588,6 +588,7 @@ minillaplocal.query.files=\
   mrr.q,\
   multiMapJoin1.q,\
   multiMapJoin2.q,\
+  murmur_hash_migration.q,\
   non_native_window_udf.q,\
   optimize_join_ptp.q,\
   orc_analyze.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/87b9f647/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
index 1661aec..c3eb886 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
@@ -164,6 +164,9 @@ public class TezCompiler extends TaskCompiler {
     runStatsAnnotation(procCtx);
     perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.TEZ_COMPILER, "Setup stats in the operator plan");
 
+    // Update bucketing version of ReduceSinkOp if needed
+    updateBucketingVersionForUpgrade(procCtx);
+
     perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.TEZ_COMPILER);
     // run the optimizations that use stats for optimization
     runStatsDependentOptimizations(procCtx, inputs, outputs);
@@ -1690,4 +1693,43 @@ public class TezCompiler extends TaskCompiler {
       }
     }
   }
+
+  private void updateBucketingVersionForUpgrade(OptimizeTezProcContext procCtx) {
+    // Fetch all the FileSinkOperators.
+    Set<FileSinkOperator> fsOpsAll = new HashSet<>();
+    for (TableScanOperator ts : procCtx.parseContext.getTopOps().values()) {
+      Set<FileSinkOperator> fsOps = OperatorUtils.findOperators(
+          ts, FileSinkOperator.class);
+      fsOpsAll.addAll(fsOps);
+    }
+
+
+    for (FileSinkOperator fsOp : fsOpsAll) {
+      Operator<?> parentOfFS = fsOp.getParentOperators().get(0);
+      if (parentOfFS instanceof GroupByOperator) {
+        GroupByOperator gbyOp = (GroupByOperator) parentOfFS;
+        List<String> aggs = gbyOp.getConf().getAggregatorStrings();
+        boolean compute_stats = false;
+        for (String agg : aggs) {
+          if (agg.equalsIgnoreCase("compute_stats")) {
+            compute_stats = true;
+            break;
+          }
+        }
+        if (compute_stats) {
+          continue;
+        }
+      }
+
+      // Not compute_stats
+      Set<ReduceSinkOperator> rsOps = OperatorUtils.findOperatorsUpstream(parentOfFS, ReduceSinkOperator.class);
+      if (rsOps.isEmpty()) {
+        continue;
+      }
+      // Skip setting if the bucketing version is not set in FileSinkOp.
+      if (fsOp.getConf().getTableInfo().isSetBucketingVersion()) {
+        rsOps.iterator().next().setBucketingVersion(fsOp.getConf().getTableInfo().getBucketingVersion());
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/87b9f647/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
index bbce940..b73faa5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
@@ -183,6 +183,9 @@ public class TableDesc implements Serializable, Cloneable {
     return (properties.getProperty(hive_metastoreConstants.META_TABLE_STORAGE) != null);
   }
 
+  public boolean isSetBucketingVersion() {
+    return properties.getProperty(hive_metastoreConstants.TABLE_BUCKETING_VERSION) != null;
+  }
   public int getBucketingVersion() {
     return Utilities.getBucketingVersion(
         properties.getProperty(hive_metastoreConstants.TABLE_BUCKETING_VERSION));

http://git-wip-us.apache.org/repos/asf/hive/blob/87b9f647/ql/src/test/queries/clientpositive/murmur_hash_migration.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/murmur_hash_migration.q b/ql/src/test/queries/clientpositive/murmur_hash_migration.q
new file mode 100644
index 0000000..2b8da9f
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/murmur_hash_migration.q
@@ -0,0 +1,61 @@
+--! qt:dataset:src
+set hive.stats.column.autogather=false;
+set hive.strict.checks.bucketing=false;
+
+set hive.mapred.mode=nonstrict;
+set hive.explain.user=false;
+set hive.auto.convert.join=true;
+set hive.auto.convert.join.noconditionaltask=true;
+set hive.auto.convert.join.noconditionaltask.size=30000;
+
+CREATE TABLE srcbucket_mapjoin_n18_stage(key int, value string) partitioned by (ds string) STORED AS TEXTFILE TBLPROPERTIES("bucketing_version" = '1');
+CREATE TABLE srcbucket_mapjoin_part_n20_stage (key int, value string) partitioned by (ds string) STORED AS TEXTFILE TBLPROPERTIES("bucketing_version" = '1');
+
+CREATE TABLE srcbucket_mapjoin_n18(key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE TBLPROPERTIES("bucketing_version" = '1');
+CREATE TABLE srcbucket_mapjoin_part_n20 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE TBLPROPERTIES("bucketing_version" = '1');
+
+load data local inpath '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin_n18_stage partition(ds='2008-04-08');
+load data local inpath '../../data/files/bmj1/000001_0' INTO TABLE srcbucket_mapjoin_n18_stage partition(ds='2008-04-08');
+
+load data local inpath '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin_part_n20_stage partition(ds='2008-04-08');
+load data local inpath '../../data/files/bmj/000001_0' INTO TABLE srcbucket_mapjoin_part_n20_stage partition(ds='2008-04-08');
+load data local inpath '../../data/files/bmj/000002_0' INTO TABLE srcbucket_mapjoin_part_n20_stage partition(ds='2008-04-08');
+load data local inpath '../../data/files/bmj/000003_0' INTO TABLE srcbucket_mapjoin_part_n20_stage partition(ds='2008-04-08');
+
+set hive.optimize.bucketingsorting=false;
+
+
+insert overwrite table srcbucket_mapjoin_n18  partition (ds='2008-04-08')
+select key,value from srcbucket_mapjoin_n18_stage limit 150;
+
+insert overwrite table srcbucket_mapjoin_part_n20 partition (ds='2008-04-08')
+  select key,value from srcbucket_mapjoin_part_n20_stage limit 150;
+
+analyze table srcbucket_mapjoin_n18 compute statistics for columns;
+analyze table srcbucket_mapjoin_part_n20 compute statistics for columns;
+
+
+CREATE TABLE tab_part_n11 (key int, value string) PARTITIONED BY(ds STRING) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
+explain
+insert overwrite table tab_part_n11 partition (ds='2008-04-08')
+  select key,value from srcbucket_mapjoin_part_n20;
+insert overwrite table tab_part_n11 partition (ds='2008-04-08')
+  select key,value from srcbucket_mapjoin_part_n20;
+
+CREATE TABLE tab_n10(key int, value string) PARTITIONED BY(ds STRING) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
+explain
+insert overwrite table tab_n10 partition (ds='2008-04-08')
+  select key,value from srcbucket_mapjoin_n18;
+insert overwrite table tab_n10 partition (ds='2008-04-08')
+  select key,value from srcbucket_mapjoin_n18;
+
+analyze table tab_part_n11 compute statistics for columns;
+analyze table tab_n10 compute statistics for columns;
+
+explain
+select t1.key, t1.value, t2.key, t2.value from srcbucket_mapjoin_n18 t1, srcbucket_mapjoin_part_n20 t2 where t1.key = t2.key order by t1.key, t1.value, t2.key, t2.value;
+select t1.key, t1.value, t2.key, t2.value from srcbucket_mapjoin_n18 t1, srcbucket_mapjoin_part_n20 t2 where t1.key = t2.key order by t1.key, t1.value, t2.key, t2.value;
+
+explain
+select t1.key, t1.value, t2.key, t2.value from tab_part_n11 t1, tab_n10 t2 where t1.key = t2.key order by t1.key, t1.value, t2.key, t2.value;
+select t1.key, t1.value, t2.key, t2.value from tab_part_n11 t1, tab_n10 t2 where t1.key = t2.key order by t1.key, t1.value, t2.key, t2.value;

http://git-wip-us.apache.org/repos/asf/hive/blob/87b9f647/ql/src/test/results/clientpositive/llap/murmur_hash_migration.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/murmur_hash_migration.q.out b/ql/src/test/results/clientpositive/llap/murmur_hash_migration.q.out
new file mode 100644
index 0000000..be5b5f7
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/murmur_hash_migration.q.out
@@ -0,0 +1,618 @@
+PREHOOK: query: CREATE TABLE srcbucket_mapjoin_n18_stage(key int, value string) partitioned by (ds string) STORED AS TEXTFILE TBLPROPERTIES("bucketing_version" = '1')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@srcbucket_mapjoin_n18_stage
+POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_n18_stage(key int, value string) partitioned by (ds string) STORED AS TEXTFILE TBLPROPERTIES("bucketing_version" = '1')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@srcbucket_mapjoin_n18_stage
+PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part_n20_stage (key int, value string) partitioned by (ds string) STORED AS TEXTFILE TBLPROPERTIES("bucketing_version" = '1')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@srcbucket_mapjoin_part_n20_stage
+POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part_n20_stage (key int, value string) partitioned by (ds string) STORED AS TEXTFILE TBLPROPERTIES("bucketing_version" = '1')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@srcbucket_mapjoin_part_n20_stage
+PREHOOK: query: CREATE TABLE srcbucket_mapjoin_n18(key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE TBLPROPERTIES("bucketing_version" = '1')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@srcbucket_mapjoin_n18
+POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_n18(key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE TBLPROPERTIES("bucketing_version" = '1')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@srcbucket_mapjoin_n18
+PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part_n20 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE TBLPROPERTIES("bucketing_version" = '1')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@srcbucket_mapjoin_part_n20
+POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part_n20 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE TBLPROPERTIES("bucketing_version" = '1')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@srcbucket_mapjoin_part_n20
+PREHOOK: query: load data local inpath '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin_n18_stage partition(ds='2008-04-08')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@srcbucket_mapjoin_n18_stage
+POSTHOOK: query: load data local inpath '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin_n18_stage partition(ds='2008-04-08')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@srcbucket_mapjoin_n18_stage
+POSTHOOK: Output: default@srcbucket_mapjoin_n18_stage@ds=2008-04-08
+PREHOOK: query: load data local inpath '../../data/files/bmj1/000001_0' INTO TABLE srcbucket_mapjoin_n18_stage partition(ds='2008-04-08')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@srcbucket_mapjoin_n18_stage@ds=2008-04-08
+POSTHOOK: query: load data local inpath '../../data/files/bmj1/000001_0' INTO TABLE srcbucket_mapjoin_n18_stage partition(ds='2008-04-08')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@srcbucket_mapjoin_n18_stage@ds=2008-04-08
+PREHOOK: query: load data local inpath '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin_part_n20_stage partition(ds='2008-04-08')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@srcbucket_mapjoin_part_n20_stage
+POSTHOOK: query: load data local inpath '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin_part_n20_stage partition(ds='2008-04-08')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@srcbucket_mapjoin_part_n20_stage
+POSTHOOK: Output: default@srcbucket_mapjoin_part_n20_stage@ds=2008-04-08
+PREHOOK: query: load data local inpath '../../data/files/bmj/000001_0' INTO TABLE srcbucket_mapjoin_part_n20_stage partition(ds='2008-04-08')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@srcbucket_mapjoin_part_n20_stage@ds=2008-04-08
+POSTHOOK: query: load data local inpath '../../data/files/bmj/000001_0' INTO TABLE srcbucket_mapjoin_part_n20_stage partition(ds='2008-04-08')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@srcbucket_mapjoin_part_n20_stage@ds=2008-04-08
+PREHOOK: query: load data local inpath '../../data/files/bmj/000002_0' INTO TABLE srcbucket_mapjoin_part_n20_stage partition(ds='2008-04-08')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@srcbucket_mapjoin_part_n20_stage@ds=2008-04-08
+POSTHOOK: query: load data local inpath '../../data/files/bmj/000002_0' INTO TABLE srcbucket_mapjoin_part_n20_stage partition(ds='2008-04-08')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@srcbucket_mapjoin_part_n20_stage@ds=2008-04-08
+PREHOOK: query: load data local inpath '../../data/files/bmj/000003_0' INTO TABLE srcbucket_mapjoin_part_n20_stage partition(ds='2008-04-08')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@srcbucket_mapjoin_part_n20_stage@ds=2008-04-08
+POSTHOOK: query: load data local inpath '../../data/files/bmj/000003_0' INTO TABLE srcbucket_mapjoin_part_n20_stage partition(ds='2008-04-08')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@srcbucket_mapjoin_part_n20_stage@ds=2008-04-08
+PREHOOK: query: insert overwrite table srcbucket_mapjoin_n18  partition (ds='2008-04-08')
+select key,value from srcbucket_mapjoin_n18_stage limit 150
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcbucket_mapjoin_n18_stage
+PREHOOK: Input: default@srcbucket_mapjoin_n18_stage@ds=2008-04-08
+PREHOOK: Output: default@srcbucket_mapjoin_n18@ds=2008-04-08
+POSTHOOK: query: insert overwrite table srcbucket_mapjoin_n18  partition (ds='2008-04-08')
+select key,value from srcbucket_mapjoin_n18_stage limit 150
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcbucket_mapjoin_n18_stage
+POSTHOOK: Input: default@srcbucket_mapjoin_n18_stage@ds=2008-04-08
+POSTHOOK: Output: default@srcbucket_mapjoin_n18@ds=2008-04-08
+POSTHOOK: Lineage: srcbucket_mapjoin_n18 PARTITION(ds=2008-04-08).key SIMPLE [(srcbucket_mapjoin_n18_stage)srcbucket_mapjoin_n18_stage.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: srcbucket_mapjoin_n18 PARTITION(ds=2008-04-08).value SIMPLE [(srcbucket_mapjoin_n18_stage)srcbucket_mapjoin_n18_stage.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: insert overwrite table srcbucket_mapjoin_part_n20 partition (ds='2008-04-08')
+  select key,value from srcbucket_mapjoin_part_n20_stage limit 150
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcbucket_mapjoin_part_n20_stage
+PREHOOK: Input: default@srcbucket_mapjoin_part_n20_stage@ds=2008-04-08
+PREHOOK: Output: default@srcbucket_mapjoin_part_n20@ds=2008-04-08
+POSTHOOK: query: insert overwrite table srcbucket_mapjoin_part_n20 partition (ds='2008-04-08')
+  select key,value from srcbucket_mapjoin_part_n20_stage limit 150
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcbucket_mapjoin_part_n20_stage
+POSTHOOK: Input: default@srcbucket_mapjoin_part_n20_stage@ds=2008-04-08
+POSTHOOK: Output: default@srcbucket_mapjoin_part_n20@ds=2008-04-08
+POSTHOOK: Lineage: srcbucket_mapjoin_part_n20 PARTITION(ds=2008-04-08).key SIMPLE [(srcbucket_mapjoin_part_n20_stage)srcbucket_mapjoin_part_n20_stage.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: srcbucket_mapjoin_part_n20 PARTITION(ds=2008-04-08).value SIMPLE [(srcbucket_mapjoin_part_n20_stage)srcbucket_mapjoin_part_n20_stage.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: analyze table srcbucket_mapjoin_n18 compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@srcbucket_mapjoin_n18
+PREHOOK: Input: default@srcbucket_mapjoin_n18@ds=2008-04-08
+PREHOOK: Output: default@srcbucket_mapjoin_n18
+PREHOOK: Output: default@srcbucket_mapjoin_n18@ds=2008-04-08
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table srcbucket_mapjoin_n18 compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@srcbucket_mapjoin_n18
+POSTHOOK: Input: default@srcbucket_mapjoin_n18@ds=2008-04-08
+POSTHOOK: Output: default@srcbucket_mapjoin_n18
+POSTHOOK: Output: default@srcbucket_mapjoin_n18@ds=2008-04-08
+#### A masked pattern was here ####
+PREHOOK: query: analyze table srcbucket_mapjoin_part_n20 compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@srcbucket_mapjoin_part_n20
+PREHOOK: Input: default@srcbucket_mapjoin_part_n20@ds=2008-04-08
+PREHOOK: Output: default@srcbucket_mapjoin_part_n20
+PREHOOK: Output: default@srcbucket_mapjoin_part_n20@ds=2008-04-08
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table srcbucket_mapjoin_part_n20 compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@srcbucket_mapjoin_part_n20
+POSTHOOK: Input: default@srcbucket_mapjoin_part_n20@ds=2008-04-08
+POSTHOOK: Output: default@srcbucket_mapjoin_part_n20
+POSTHOOK: Output: default@srcbucket_mapjoin_part_n20@ds=2008-04-08
+#### A masked pattern was here ####
+PREHOOK: query: CREATE TABLE tab_part_n11 (key int, value string) PARTITIONED BY(ds STRING) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tab_part_n11
+POSTHOOK: query: CREATE TABLE tab_part_n11 (key int, value string) PARTITIONED BY(ds STRING) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tab_part_n11
+PREHOOK: query: explain
+insert overwrite table tab_part_n11 partition (ds='2008-04-08')
+  select key,value from srcbucket_mapjoin_part_n20
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+insert overwrite table tab_part_n11 partition (ds='2008-04-08')
+  select key,value from srcbucket_mapjoin_part_n20
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: srcbucket_mapjoin_part_n20
+                  Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: int), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      sort order: 
+                      Map-reduce partition columns: _col0 (type: int)
+                      Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: int), _col1 (type: string)
+            Execution mode: vectorized, llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: vectorized, llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: int), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.tab_part_n11
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.tab_part_n11
+
+  Stage: Stage-3
+    Stats Work
+      Basic Stats Work:
+
+PREHOOK: query: insert overwrite table tab_part_n11 partition (ds='2008-04-08')
+  select key,value from srcbucket_mapjoin_part_n20
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcbucket_mapjoin_part_n20
+PREHOOK: Input: default@srcbucket_mapjoin_part_n20@ds=2008-04-08
+PREHOOK: Output: default@tab_part_n11@ds=2008-04-08
+POSTHOOK: query: insert overwrite table tab_part_n11 partition (ds='2008-04-08')
+  select key,value from srcbucket_mapjoin_part_n20
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcbucket_mapjoin_part_n20
+POSTHOOK: Input: default@srcbucket_mapjoin_part_n20@ds=2008-04-08
+POSTHOOK: Output: default@tab_part_n11@ds=2008-04-08
+POSTHOOK: Lineage: tab_part_n11 PARTITION(ds=2008-04-08).key SIMPLE [(srcbucket_mapjoin_part_n20)srcbucket_mapjoin_part_n20.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: tab_part_n11 PARTITION(ds=2008-04-08).value SIMPLE [(srcbucket_mapjoin_part_n20)srcbucket_mapjoin_part_n20.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: CREATE TABLE tab_n10(key int, value string) PARTITIONED BY(ds STRING) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tab_n10
+POSTHOOK: query: CREATE TABLE tab_n10(key int, value string) PARTITIONED BY(ds STRING) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tab_n10
+PREHOOK: query: explain
+insert overwrite table tab_n10 partition (ds='2008-04-08')
+  select key,value from srcbucket_mapjoin_n18
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+insert overwrite table tab_n10 partition (ds='2008-04-08')
+  select key,value from srcbucket_mapjoin_n18
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: srcbucket_mapjoin_n18
+                  Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: int), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      sort order: 
+                      Map-reduce partition columns: _col0 (type: int)
+                      Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: int), _col1 (type: string)
+            Execution mode: vectorized, llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: vectorized, llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: int), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.tab_n10
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.tab_n10
+
+  Stage: Stage-3
+    Stats Work
+      Basic Stats Work:
+
+PREHOOK: query: insert overwrite table tab_n10 partition (ds='2008-04-08')
+  select key,value from srcbucket_mapjoin_n18
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcbucket_mapjoin_n18
+PREHOOK: Input: default@srcbucket_mapjoin_n18@ds=2008-04-08
+PREHOOK: Output: default@tab_n10@ds=2008-04-08
+POSTHOOK: query: insert overwrite table tab_n10 partition (ds='2008-04-08')
+  select key,value from srcbucket_mapjoin_n18
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcbucket_mapjoin_n18
+POSTHOOK: Input: default@srcbucket_mapjoin_n18@ds=2008-04-08
+POSTHOOK: Output: default@tab_n10@ds=2008-04-08
+POSTHOOK: Lineage: tab_n10 PARTITION(ds=2008-04-08).key SIMPLE [(srcbucket_mapjoin_n18)srcbucket_mapjoin_n18.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: tab_n10 PARTITION(ds=2008-04-08).value SIMPLE [(srcbucket_mapjoin_n18)srcbucket_mapjoin_n18.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: analyze table tab_part_n11 compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@tab_part_n11
+PREHOOK: Input: default@tab_part_n11@ds=2008-04-08
+PREHOOK: Output: default@tab_part_n11
+PREHOOK: Output: default@tab_part_n11@ds=2008-04-08
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table tab_part_n11 compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@tab_part_n11
+POSTHOOK: Input: default@tab_part_n11@ds=2008-04-08
+POSTHOOK: Output: default@tab_part_n11
+POSTHOOK: Output: default@tab_part_n11@ds=2008-04-08
+#### A masked pattern was here ####
+PREHOOK: query: analyze table tab_n10 compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@tab_n10
+PREHOOK: Input: default@tab_n10@ds=2008-04-08
+PREHOOK: Output: default@tab_n10
+PREHOOK: Output: default@tab_n10@ds=2008-04-08
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table tab_n10 compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@tab_n10
+POSTHOOK: Input: default@tab_n10@ds=2008-04-08
+POSTHOOK: Output: default@tab_n10
+POSTHOOK: Output: default@tab_n10@ds=2008-04-08
+#### A masked pattern was here ####
+PREHOOK: query: explain
+select t1.key, t1.value, t2.key, t2.value from srcbucket_mapjoin_n18 t1, srcbucket_mapjoin_part_n20 t2 where t1.key = t2.key order by t1.key, t1.value, t2.key, t2.value
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select t1.key, t1.value, t2.key, t2.value from srcbucket_mapjoin_n18 t1, srcbucket_mapjoin_part_n20 t2 where t1.key = t2.key order by t1.key, t1.value, t2.key, t2.value
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 3 (CUSTOM_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: t1
+                  filterExpr: key is not null (type: boolean)
+                  Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: key (type: int), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                      Map Join Operator
+                        condition map:
+                             Inner Join 0 to 1
+                        keys:
+                          0 _col0 (type: int)
+                          1 _col0 (type: int)
+                        outputColumnNames: _col0, _col1, _col2, _col3
+                        input vertices:
+                          1 Map 3
+                        Statistics: Num rows: 220 Data size: 41800 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: int), _col3 (type: string)
+                          sort order: ++++
+                          Statistics: Num rows: 220 Data size: 41800 Basic stats: COMPLETE Column stats: COMPLETE
+            Execution mode: vectorized, llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: t2
+                  filterExpr: key is not null (type: boolean)
+                  Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: key (type: int), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: int)
+                        Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col1 (type: string)
+            Execution mode: vectorized, llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: vectorized, llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: int), KEY.reducesinkkey3 (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 220 Data size: 41800 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 220 Data size: 41800 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select t1.key, t1.value, t2.key, t2.value from srcbucket_mapjoin_n18 t1, srcbucket_mapjoin_part_n20 t2 where t1.key = t2.key order by t1.key, t1.value, t2.key, t2.value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcbucket_mapjoin_n18
+PREHOOK: Input: default@srcbucket_mapjoin_n18@ds=2008-04-08
+PREHOOK: Input: default@srcbucket_mapjoin_part_n20
+PREHOOK: Input: default@srcbucket_mapjoin_part_n20@ds=2008-04-08
+#### A masked pattern was here ####
+POSTHOOK: query: select t1.key, t1.value, t2.key, t2.value from srcbucket_mapjoin_n18 t1, srcbucket_mapjoin_part_n20 t2 where t1.key = t2.key order by t1.key, t1.value, t2.key, t2.value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcbucket_mapjoin_n18
+POSTHOOK: Input: default@srcbucket_mapjoin_n18@ds=2008-04-08
+POSTHOOK: Input: default@srcbucket_mapjoin_part_n20
+POSTHOOK: Input: default@srcbucket_mapjoin_part_n20@ds=2008-04-08
+#### A masked pattern was here ####
+82	val_82	82	val_82
+86	val_86	86	val_86
+145	val_145	145	val_145
+152	val_152	152	val_152
+152	val_152	152	val_152
+219	val_219	219	val_219
+219	val_219	219	val_219
+255	val_255	255	val_255
+255	val_255	255	val_255
+273	val_273	273	val_273
+273	val_273	273	val_273
+273	val_273	273	val_273
+277	val_277	277	val_277
+277	val_277	277	val_277
+277	val_277	277	val_277
+277	val_277	277	val_277
+369	val_369	369	val_369
+369	val_369	369	val_369
+369	val_369	369	val_369
+406	val_406	406	val_406
+406	val_406	406	val_406
+406	val_406	406	val_406
+406	val_406	406	val_406
+417	val_417	417	val_417
+417	val_417	417	val_417
+417	val_417	417	val_417
+446	val_446	446	val_446
+PREHOOK: query: explain
+select t1.key, t1.value, t2.key, t2.value from tab_part_n11 t1, tab_n10 t2 where t1.key = t2.key order by t1.key, t1.value, t2.key, t2.value
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select t1.key, t1.value, t2.key, t2.value from tab_part_n11 t1, tab_n10 t2 where t1.key = t2.key order by t1.key, t1.value, t2.key, t2.value
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 3 (CUSTOM_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: t1
+                  filterExpr: key is not null (type: boolean)
+                  Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: key (type: int), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                      Map Join Operator
+                        condition map:
+                             Inner Join 0 to 1
+                        keys:
+                          0 _col0 (type: int)
+                          1 _col0 (type: int)
+                        outputColumnNames: _col0, _col1, _col2, _col3
+                        input vertices:
+                          1 Map 3
+                        Statistics: Num rows: 220 Data size: 41800 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: int), _col3 (type: string)
+                          sort order: ++++
+                          Statistics: Num rows: 220 Data size: 41800 Basic stats: COMPLETE Column stats: COMPLETE
+            Execution mode: vectorized, llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: t2
+                  filterExpr: key is not null (type: boolean)
+                  Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: key (type: int), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: int)
+                        Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col1 (type: string)
+            Execution mode: vectorized, llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: vectorized, llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: int), KEY.reducesinkkey3 (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 220 Data size: 41800 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 220 Data size: 41800 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select t1.key, t1.value, t2.key, t2.value from tab_part_n11 t1, tab_n10 t2 where t1.key = t2.key order by t1.key, t1.value, t2.key, t2.value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tab_n10
+PREHOOK: Input: default@tab_n10@ds=2008-04-08
+PREHOOK: Input: default@tab_part_n11
+PREHOOK: Input: default@tab_part_n11@ds=2008-04-08
+#### A masked pattern was here ####
+POSTHOOK: query: select t1.key, t1.value, t2.key, t2.value from tab_part_n11 t1, tab_n10 t2 where t1.key = t2.key order by t1.key, t1.value, t2.key, t2.value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tab_n10
+POSTHOOK: Input: default@tab_n10@ds=2008-04-08
+POSTHOOK: Input: default@tab_part_n11
+POSTHOOK: Input: default@tab_part_n11@ds=2008-04-08
+#### A masked pattern was here ####
+82	val_82	82	val_82
+86	val_86	86	val_86
+145	val_145	145	val_145
+152	val_152	152	val_152
+152	val_152	152	val_152
+219	val_219	219	val_219
+219	val_219	219	val_219
+255	val_255	255	val_255
+255	val_255	255	val_255
+273	val_273	273	val_273
+273	val_273	273	val_273
+273	val_273	273	val_273
+277	val_277	277	val_277
+277	val_277	277	val_277
+277	val_277	277	val_277
+277	val_277	277	val_277
+369	val_369	369	val_369
+369	val_369	369	val_369
+369	val_369	369	val_369
+406	val_406	406	val_406
+406	val_406	406	val_406
+406	val_406	406	val_406
+406	val_406	406	val_406
+417	val_417	417	val_417
+417	val_417	417	val_417
+417	val_417	417	val_417
+446	val_446	446	val_446


[06/11] hive git commit: HIVE-20082: HiveDecimal to string conversion doesn't format the decimal correctly (Jason Dere, reviewed by Ashutosh Chauhan)

Posted by se...@apache.org.
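
All of the .q.out hunks in this commit reflect the same planner-level change: converting a value to string is now rendered in EXPLAIN output as CAST( ... AS STRING) instead of the old UDFToString(...) spelling, and udftostring itself disappears from the SHOW FUNCTIONS output (see show_functions.q.out below). A minimal HiveQL sketch of the two spellings, using a hypothetical table dec_src with a DECIMAL column d (names are made up for illustration, not taken from this commit):

    -- hypothetical example; dec_src and d are assumed names
    SELECT CAST(d AS STRING) FROM dec_src;   -- how the conversion appears in plans after HIVE-20082
    -- the same conversion was previously shown in EXPLAIN output as UDFToString(d)

Both spellings convert d to a string; per the commit summary, the fix is that the resulting text now formats the decimal value correctly.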
http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/orc_merge1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_merge1.q.out b/ql/src/test/results/clientpositive/orc_merge1.q.out
index 622e899..ec6dbdd 100644
--- a/ql/src/test/results/clientpositive/orc_merge1.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge1.q.out
@@ -75,7 +75,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                     name: default.orcfile_merge1_n1
               Select Operator
-                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string)
+                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string)
                 outputColumnNames: key, value, ds, part
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
@@ -189,7 +189,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                     name: default.orcfile_merge1b_n1
               Select Operator
-                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string)
+                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string)
                 outputColumnNames: key, value, ds, part
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
@@ -342,7 +342,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                     name: default.orcfile_merge1c_n1
               Select Operator
-                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string)
+                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string)
                 outputColumnNames: key, value, ds, part
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/orc_merge10.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_merge10.q.out b/ql/src/test/results/clientpositive/orc_merge10.q.out
index b54abfa..f04e4a3 100644
--- a/ql/src/test/results/clientpositive/orc_merge10.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge10.q.out
@@ -75,7 +75,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                     name: default.orcfile_merge1
               Select Operator
-                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string)
+                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string)
                 outputColumnNames: key, value, ds, part
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
@@ -189,7 +189,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                     name: default.orcfile_merge1b
               Select Operator
-                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string)
+                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string)
                 outputColumnNames: key, value, ds, part
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
@@ -342,7 +342,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                     name: default.orcfile_merge1c
               Select Operator
-                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string)
+                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string)
                 outputColumnNames: key, value, ds, part
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/orc_merge2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_merge2.q.out b/ql/src/test/results/clientpositive/orc_merge2.q.out
index 2f48619..f1a7ba7 100644
--- a/ql/src/test/results/clientpositive/orc_merge2.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge2.q.out
@@ -54,7 +54,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                     name: default.orcfile_merge2a_n0
               Select Operator
-                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string), UDFToString(_col3) (type: string)
+                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string), CAST( _col3 AS STRING) (type: string)
                 outputColumnNames: key, value, one, two, three
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/orc_merge_diff_fs.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_merge_diff_fs.q.out b/ql/src/test/results/clientpositive/orc_merge_diff_fs.q.out
index 2b4aec3..129b19e 100644
--- a/ql/src/test/results/clientpositive/orc_merge_diff_fs.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge_diff_fs.q.out
@@ -75,7 +75,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                     name: default.orcfile_merge1_n0
               Select Operator
-                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string)
+                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string)
                 outputColumnNames: key, value, ds, part
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
@@ -189,7 +189,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                     name: default.orcfile_merge1b_n0
               Select Operator
-                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string)
+                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string)
                 outputColumnNames: key, value, ds, part
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
@@ -342,7 +342,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                     name: default.orcfile_merge1c_n0
               Select Operator
-                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string)
+                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string)
                 outputColumnNames: key, value, ds, part
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/perf/spark/query36.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query36.q.out b/ql/src/test/results/clientpositive/perf/spark/query36.q.out
index bdf96a9..d3bea76 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query36.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query36.q.out
@@ -224,9 +224,9 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1, _col2, _col3, _col4
                   Statistics: Num rows: 1149975358 Data size: 101451159969 Basic stats: COMPLETE Column stats: NONE
                   Reduce Output Operator
-                    key expressions: (grouping(_col4, 1) + grouping(_col4, 0)) (type: bigint), CASE WHEN ((grouping(_col4, 0) = 0)) THEN (_col0) ELSE (UDFToString(null)) END (type: string), (_col2 / _col3) (type: decimal(37,20))
+                    key expressions: (grouping(_col4, 1) + grouping(_col4, 0)) (type: bigint), CASE WHEN ((grouping(_col4, 0) = 0)) THEN (_col0) ELSE (CAST( null AS STRING)) END (type: string), (_col2 / _col3) (type: decimal(37,20))
                     sort order: +++
-                    Map-reduce partition columns: (grouping(_col4, 1) + grouping(_col4, 0)) (type: bigint), CASE WHEN ((grouping(_col4, 0) = 0)) THEN (_col0) ELSE (UDFToString(null)) END (type: string)
+                    Map-reduce partition columns: (grouping(_col4, 1) + grouping(_col4, 0)) (type: bigint), CASE WHEN ((grouping(_col4, 0) = 0)) THEN (_col0) ELSE (CAST( null AS STRING)) END (type: string)
                     Statistics: Num rows: 1149975358 Data size: 101451159969 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: decimal(17,2)), _col3 (type: decimal(17,2)), _col4 (type: bigint)
         Reducer 5 
@@ -246,7 +246,7 @@ STAGE PLANS:
                         input alias: ptf_1
                         name: windowingtablefunction
                         order by: (_col2 / _col3) ASC NULLS FIRST
-                        partition by: (grouping(_col4, 1) + grouping(_col4, 0)), CASE WHEN ((grouping(_col4, 0) = 0)) THEN (_col0) ELSE (UDFToString(null)) END
+                        partition by: (grouping(_col4, 1) + grouping(_col4, 0)), CASE WHEN ((grouping(_col4, 0) = 0)) THEN (_col0) ELSE (CAST( null AS STRING)) END
                         raw input shape:
                         window functions:
                             window function definition

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/perf/spark/query70.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query70.q.out b/ql/src/test/results/clientpositive/perf/spark/query70.q.out
index 2f94f94..4222b52 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query70.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query70.q.out
@@ -377,9 +377,9 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1, _col2, _col3
                   Statistics: Num rows: 1149975358 Data size: 101451159969 Basic stats: COMPLETE Column stats: NONE
                   Reduce Output Operator
-                    key expressions: (grouping(_col3, 1) + grouping(_col3, 0)) (type: bigint), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (UDFToString(null)) END (type: string), _col2 (type: decimal(17,2))
+                    key expressions: (grouping(_col3, 1) + grouping(_col3, 0)) (type: bigint), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (CAST( null AS STRING)) END (type: string), _col2 (type: decimal(17,2))
                     sort order: ++-
-                    Map-reduce partition columns: (grouping(_col3, 1) + grouping(_col3, 0)) (type: bigint), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (UDFToString(null)) END (type: string)
+                    Map-reduce partition columns: (grouping(_col3, 1) + grouping(_col3, 0)) (type: bigint), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (CAST( null AS STRING)) END (type: string)
                     Statistics: Num rows: 1149975358 Data size: 101451159969 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col0 (type: string), _col1 (type: string), _col3 (type: bigint)
         Reducer 5 
@@ -399,7 +399,7 @@ STAGE PLANS:
                         input alias: ptf_1
                         name: windowingtablefunction
                         order by: _col2 DESC NULLS LAST
-                        partition by: (grouping(_col3, 1) + grouping(_col3, 0)), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (UDFToString(null)) END
+                        partition by: (grouping(_col3, 1) + grouping(_col3, 0)), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (CAST( null AS STRING)) END
                         raw input shape:
                         window functions:
                             window function definition

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/perf/spark/query86.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query86.q.out b/ql/src/test/results/clientpositive/perf/spark/query86.q.out
index ff7ca83..1d1e4ef 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query86.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query86.q.out
@@ -180,9 +180,9 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1, _col2, _col3
                   Statistics: Num rows: 261364852 Data size: 35538061226 Basic stats: COMPLETE Column stats: NONE
                   Reduce Output Operator
-                    key expressions: (grouping(_col3, 1) + grouping(_col3, 0)) (type: bigint), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (UDFToString(null)) END (type: string), _col2 (type: decimal(17,2))
+                    key expressions: (grouping(_col3, 1) + grouping(_col3, 0)) (type: bigint), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (CAST( null AS STRING)) END (type: string), _col2 (type: decimal(17,2))
                     sort order: ++-
-                    Map-reduce partition columns: (grouping(_col3, 1) + grouping(_col3, 0)) (type: bigint), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (UDFToString(null)) END (type: string)
+                    Map-reduce partition columns: (grouping(_col3, 1) + grouping(_col3, 0)) (type: bigint), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (CAST( null AS STRING)) END (type: string)
                     Statistics: Num rows: 261364852 Data size: 35538061226 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col0 (type: string), _col1 (type: string), _col3 (type: bigint)
         Reducer 4 
@@ -202,7 +202,7 @@ STAGE PLANS:
                         input alias: ptf_1
                         name: windowingtablefunction
                         order by: _col2 DESC NULLS LAST
-                        partition by: (grouping(_col3, 1) + grouping(_col3, 0)), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (UDFToString(null)) END
+                        partition by: (grouping(_col3, 1) + grouping(_col3, 0)), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (CAST( null AS STRING)) END
                         raw input shape:
                         window functions:
                             window function definition

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/perf/tez/query36.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/tez/query36.q.out b/ql/src/test/results/clientpositive/perf/tez/query36.q.out
index 53c53db..c86c9e4 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query36.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query36.q.out
@@ -85,12 +85,12 @@ Stage-0
               Select Operator [SEL_115] (rows=1149975358 width=88)
                 Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
                 PTF Operator [PTF_114] (rows=1149975358 width=88)
-                  Function definitions:[{},{"name:":"windowingtablefunction","order by:":"(_col2 / _col3) ASC NULLS FIRST","partition by:":"(grouping(_col4, 1) + grouping(_col4, 0)), CASE WHEN ((grouping(_col4, 0) = 0)) THEN (_col0) ELSE (UDFToString(null)) END"}]
+                  Function definitions:[{},{"name:":"windowingtablefunction","order by:":"(_col2 / _col3) ASC NULLS FIRST","partition by:":"(grouping(_col4, 1) + grouping(_col4, 0)), CASE WHEN ((grouping(_col4, 0) = 0)) THEN (_col0) ELSE (CAST( null AS STRING)) END"}]
                   Select Operator [SEL_113] (rows=1149975358 width=88)
                     Output:["_col0","_col1","_col2","_col3","_col4"]
                   <-Reducer 5 [SIMPLE_EDGE] vectorized
                     SHUFFLE [RS_112]
-                      PartitionCols:(grouping(_col4, 1) + grouping(_col4, 0)), CASE WHEN ((grouping(_col4, 0) = 0)) THEN (_col0) ELSE (UDFToString(null)) END
+                      PartitionCols:(grouping(_col4, 1) + grouping(_col4, 0)), CASE WHEN ((grouping(_col4, 0) = 0)) THEN (_col0) ELSE (CAST( null AS STRING)) END
                       Select Operator [SEL_111] (rows=1149975358 width=88)
                         Output:["_col0","_col1","_col2","_col3","_col4"]
                         Group By Operator [GBY_110] (rows=1149975358 width=88)

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/perf/tez/query70.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/tez/query70.q.out b/ql/src/test/results/clientpositive/perf/tez/query70.q.out
index f21c7ad..4efb85a 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query70.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query70.q.out
@@ -107,12 +107,12 @@ Stage-0
               Select Operator [SEL_183] (rows=1149975358 width=88)
                 Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
                 PTF Operator [PTF_182] (rows=1149975358 width=88)
-                  Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col2 DESC NULLS LAST","partition by:":"(grouping(_col3, 1) + grouping(_col3, 0)), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (UDFToString(null)) END"}]
+                  Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col2 DESC NULLS LAST","partition by:":"(grouping(_col3, 1) + grouping(_col3, 0)), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (CAST( null AS STRING)) END"}]
                   Select Operator [SEL_181] (rows=1149975358 width=88)
                     Output:["_col0","_col1","_col2","_col3"]
                   <-Reducer 5 [SIMPLE_EDGE] vectorized
                     SHUFFLE [RS_180]
-                      PartitionCols:(grouping(_col3, 1) + grouping(_col3, 0)), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (UDFToString(null)) END
+                      PartitionCols:(grouping(_col3, 1) + grouping(_col3, 0)), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (CAST( null AS STRING)) END
                       Select Operator [SEL_179] (rows=1149975358 width=88)
                         Output:["_col0","_col1","_col2","_col3"]
                         Group By Operator [GBY_178] (rows=1149975358 width=88)

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/perf/tez/query86.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/tez/query86.q.out b/ql/src/test/results/clientpositive/perf/tez/query86.q.out
index 1c8d228..8140b72 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query86.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query86.q.out
@@ -75,12 +75,12 @@ Stage-0
               Select Operator [SEL_85] (rows=261364852 width=135)
                 Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
                 PTF Operator [PTF_84] (rows=261364852 width=135)
-                  Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col2 DESC NULLS LAST","partition by:":"(grouping(_col3, 1) + grouping(_col3, 0)), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (UDFToString(null)) END"}]
+                  Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col2 DESC NULLS LAST","partition by:":"(grouping(_col3, 1) + grouping(_col3, 0)), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (CAST( null AS STRING)) END"}]
                   Select Operator [SEL_83] (rows=261364852 width=135)
                     Output:["_col0","_col1","_col2","_col3"]
                   <-Reducer 4 [SIMPLE_EDGE] vectorized
                     SHUFFLE [RS_82]
-                      PartitionCols:(grouping(_col3, 1) + grouping(_col3, 0)), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (UDFToString(null)) END
+                      PartitionCols:(grouping(_col3, 1) + grouping(_col3, 0)), CASE WHEN ((grouping(_col3, 0) = 0)) THEN (_col0) ELSE (CAST( null AS STRING)) END
                       Select Operator [SEL_81] (rows=261364852 width=135)
                         Output:["_col0","_col1","_col2","_col3"]
                         Group By Operator [GBY_80] (rows=261364852 width=135)

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/show_functions.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/show_functions.q.out b/ql/src/test/results/clientpositive/show_functions.q.out
index 629781a..90608e2 100644
--- a/ql/src/test/results/clientpositive/show_functions.q.out
+++ b/ql/src/test/results/clientpositive/show_functions.q.out
@@ -269,7 +269,6 @@ udftofloat
 udftointeger
 udftolong
 udftoshort
-udftostring
 unbase64
 unhex
 unix_timestamp

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/smb_mapjoin_20.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/smb_mapjoin_20.q.out b/ql/src/test/results/clientpositive/smb_mapjoin_20.q.out
index cfe1e02..efaa17e 100644
--- a/ql/src/test/results/clientpositive/smb_mapjoin_20.q.out
+++ b/ql/src/test/results/clientpositive/smb_mapjoin_20.q.out
@@ -57,15 +57,15 @@ STAGE PLANS:
               outputColumnNames: _col0, _col1
               Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
-                key expressions: UDFToString(_col0) (type: string)
+                key expressions: CAST( _col0 AS STRING) (type: string)
                 sort order: +
-                Map-reduce partition columns: UDFToString(_col0) (type: string)
+                Map-reduce partition columns: CAST( _col0 AS STRING) (type: string)
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col0 (type: int), _col1 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
-          expressions: UDFToString(VALUE._col0) (type: string), VALUE._col1 (type: string), VALUE._col1 (type: string)
+          expressions: CAST( VALUE._col0 AS STRING) (type: string), VALUE._col1 (type: string), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
           Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
@@ -1442,15 +1442,15 @@ STAGE PLANS:
               outputColumnNames: _col0, _col1
               Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
-                key expressions: UDFToString(_col0) (type: string)
+                key expressions: CAST( _col0 AS STRING) (type: string)
                 sort order: +
-                Map-reduce partition columns: UDFToString(_col0) (type: string)
+                Map-reduce partition columns: CAST( _col0 AS STRING) (type: string)
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col0 (type: int), _col1 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
-          expressions: UDFToString(VALUE._col0) (type: string), VALUE._col1 (type: string), VALUE._col1 (type: string)
+          expressions: CAST( VALUE._col0 AS STRING) (type: string), VALUE._col1 (type: string), VALUE._col1 (type: string)
           outputColumnNames: _col0, _col1, _col2
           Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
           File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/bucket_map_join_spark1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket_map_join_spark1.q.out b/ql/src/test/results/clientpositive/spark/bucket_map_join_spark1.q.out
index 2a69ba0..6e7a55c 100644
--- a/ql/src/test/results/clientpositive/spark/bucket_map_join_spark1.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket_map_join_spark1.q.out
@@ -248,7 +248,7 @@ STAGE PLANS:
                         Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                         BucketMapJoin: true
                         Select Operator
-                          expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                          expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                           outputColumnNames: _col0, _col1, _col2
                           Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                           File Output Operator
@@ -563,7 +563,7 @@ STAGE PLANS:
                         Position of Big Table: 0
                         Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                         Select Operator
-                          expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                          expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                           outputColumnNames: _col0, _col1, _col2
                           Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                           File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/bucket_map_join_spark2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket_map_join_spark2.q.out b/ql/src/test/results/clientpositive/spark/bucket_map_join_spark2.q.out
index 7fecee2..142ffc2 100644
--- a/ql/src/test/results/clientpositive/spark/bucket_map_join_spark2.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket_map_join_spark2.q.out
@@ -232,7 +232,7 @@ STAGE PLANS:
                         Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                         BucketMapJoin: true
                         Select Operator
-                          expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                          expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                           outputColumnNames: _col0, _col1, _col2
                           Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                           File Output Operator
@@ -553,7 +553,7 @@ STAGE PLANS:
                         Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                         BucketMapJoin: true
                         Select Operator
-                          expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                          expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                           outputColumnNames: _col0, _col1, _col2
                           Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                           File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/bucket_map_join_spark3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket_map_join_spark3.q.out b/ql/src/test/results/clientpositive/spark/bucket_map_join_spark3.q.out
index 75aaffa..1b31fee 100644
--- a/ql/src/test/results/clientpositive/spark/bucket_map_join_spark3.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket_map_join_spark3.q.out
@@ -232,7 +232,7 @@ STAGE PLANS:
                         Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                         BucketMapJoin: true
                         Select Operator
-                          expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                          expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                           outputColumnNames: _col0, _col1, _col2
                           Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                           File Output Operator
@@ -547,7 +547,7 @@ STAGE PLANS:
                         Position of Big Table: 1
                         Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                         Select Operator
-                          expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                          expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                           outputColumnNames: _col0, _col1, _col2
                           Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                           File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/bucketmapjoin1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin1.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin1.q.out
index cf7503e..2d24266 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin1.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin1.q.out
@@ -506,7 +506,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col3
                 Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator
@@ -876,7 +876,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col3
                 Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/bucketmapjoin2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin2.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin2.q.out
index d23fe49..899885e 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin2.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin2.q.out
@@ -287,7 +287,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col3
                 Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator
@@ -662,7 +662,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col3
                 Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator
@@ -1104,7 +1104,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col3
                 Statistics: Num rows: 171 Data size: 67364 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 171 Data size: 67364 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out
index 8c33dbf..6e90ffc 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out
@@ -311,7 +311,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col4
                 Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col4 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col4 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator
@@ -686,7 +686,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col4
                 Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col4 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col4 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/bucketmapjoin4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin4.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin4.q.out
index 287bde6..25e7062 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin4.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin4.q.out
@@ -309,7 +309,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col3
                 Statistics: Num rows: 1 Data size: 30250 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 1 Data size: 30250 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -670,7 +670,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col3
                 Statistics: Num rows: 1 Data size: 30250 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 1 Data size: 30250 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/bucketmapjoin5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin5.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin5.q.out
index 3f17e4e..7b598ed 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin5.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin5.q.out
@@ -290,7 +290,7 @@ STAGE PLANS:
                       Statistics: Num rows: 327 Data size: 127864 Basic stats: PARTIAL Column stats: NONE
                       BucketMapJoin: true
                       Select Operator
-                        expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col6 (type: string)
+                        expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col6 (type: string)
                         outputColumnNames: _col0, _col1, _col2
                         Statistics: Num rows: 327 Data size: 127864 Basic stats: PARTIAL Column stats: NONE
                         File Output Operator
@@ -705,7 +705,7 @@ STAGE PLANS:
                       Statistics: Num rows: 171 Data size: 67364 Basic stats: PARTIAL Column stats: NONE
                       BucketMapJoin: true
                       Select Operator
-                        expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col6 (type: string)
+                        expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col6 (type: string)
                         outputColumnNames: _col0, _col1, _col2
                         Statistics: Num rows: 171 Data size: 67364 Basic stats: PARTIAL Column stats: NONE
                         File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative.q.out
index cade09f..54accb9 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative.q.out
@@ -187,7 +187,7 @@ STAGE PLANS:
                       Position of Big Table: 0
                       Statistics: Num rows: 154 Data size: 46200 Basic stats: PARTIAL Column stats: NONE
                       Select Operator
-                        expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col6 (type: string)
+                        expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col6 (type: string)
                         outputColumnNames: _col0, _col1, _col2
                         Statistics: Num rows: 154 Data size: 46200 Basic stats: PARTIAL Column stats: NONE
                         File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative2.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative2.q.out
index a4e2bef..46b87c1 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative2.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative2.q.out
@@ -252,7 +252,7 @@ STAGE PLANS:
                       Statistics: Num rows: 171 Data size: 67364 Basic stats: PARTIAL Column stats: NONE
                       BucketMapJoin: true
                       Select Operator
-                        expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col6 (type: string)
+                        expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col6 (type: string)
                         outputColumnNames: _col0, _col1, _col2
                         Statistics: Num rows: 171 Data size: 67364 Basic stats: PARTIAL Column stats: NONE
                         File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out b/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
index 5124d95..88bb031 100644
--- a/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
+++ b/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
@@ -201,7 +201,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -222,7 +222,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -549,7 +549,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   Reduce Output Operator
@@ -584,7 +584,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   Reduce Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/groupby5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby5.q.out b/ql/src/test/results/clientpositive/spark/groupby5.q.out
index 6f640cf..22dacc5 100644
--- a/ql/src/test/results/clientpositive/spark/groupby5.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby5.q.out
@@ -71,7 +71,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/groupby5_noskew.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby5_noskew.q.out b/ql/src/test/results/clientpositive/spark/groupby5_noskew.q.out
index 5f3678e..afa8b95 100644
--- a/ql/src/test/results/clientpositive/spark/groupby5_noskew.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby5_noskew.q.out
@@ -55,7 +55,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/groupby7_map.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby7_map.q.out b/ql/src/test/results/clientpositive/spark/groupby7_map.q.out
index d5ea3ec..d32a7be 100644
--- a/ql/src/test/results/clientpositive/spark/groupby7_map.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby7_map.q.out
@@ -70,7 +70,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -91,7 +91,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/groupby7_map_multi_single_reducer.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby7_map_multi_single_reducer.q.out b/ql/src/test/results/clientpositive/spark/groupby7_map_multi_single_reducer.q.out
index 9d09491..2c03e67 100644
--- a/ql/src/test/results/clientpositive/spark/groupby7_map_multi_single_reducer.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby7_map_multi_single_reducer.q.out
@@ -65,7 +65,7 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                    expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
@@ -83,7 +83,7 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                    expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/groupby7_map_skew.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby7_map_skew.q.out b/ql/src/test/results/clientpositive/spark/groupby7_map_skew.q.out
index 5868f7a..3d23715 100644
--- a/ql/src/test/results/clientpositive/spark/groupby7_map_skew.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby7_map_skew.q.out
@@ -86,7 +86,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -107,7 +107,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/groupby7_noskew.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby7_noskew.q.out b/ql/src/test/results/clientpositive/spark/groupby7_noskew.q.out
index 53345aa..5603ecf 100644
--- a/ql/src/test/results/clientpositive/spark/groupby7_noskew.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby7_noskew.q.out
@@ -64,7 +64,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -84,7 +84,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/groupby7_noskew_multi_single_reducer.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby7_noskew_multi_single_reducer.q.out b/ql/src/test/results/clientpositive/spark/groupby7_noskew_multi_single_reducer.q.out
index 6880900..cfb639c 100644
--- a/ql/src/test/results/clientpositive/spark/groupby7_noskew_multi_single_reducer.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby7_noskew_multi_single_reducer.q.out
@@ -67,7 +67,7 @@ STAGE PLANS:
                   Number of rows: 10
                   Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                    expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
@@ -89,7 +89,7 @@ STAGE PLANS:
                   Number of rows: 10
                   Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                    expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/groupby8.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby8.q.out b/ql/src/test/results/clientpositive/spark/groupby8.q.out
index c6cac1b..803d9ed 100644
--- a/ql/src/test/results/clientpositive/spark/groupby8.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby8.q.out
@@ -78,7 +78,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -98,7 +98,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -854,7 +854,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -874,7 +874,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/groupby8_map.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby8_map.q.out b/ql/src/test/results/clientpositive/spark/groupby8_map.q.out
index 40d3e7c..0c28890 100644
--- a/ql/src/test/results/clientpositive/spark/groupby8_map.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby8_map.q.out
@@ -64,7 +64,7 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                    expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
@@ -82,7 +82,7 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                    expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/groupby8_map_skew.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby8_map_skew.q.out b/ql/src/test/results/clientpositive/spark/groupby8_map_skew.q.out
index 053c717..ea9e679 100644
--- a/ql/src/test/results/clientpositive/spark/groupby8_map_skew.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby8_map_skew.q.out
@@ -83,7 +83,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -103,7 +103,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/groupby8_noskew.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby8_noskew.q.out b/ql/src/test/results/clientpositive/spark/groupby8_noskew.q.out
index 2ef72b7..2146c56 100644
--- a/ql/src/test/results/clientpositive/spark/groupby8_noskew.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby8_noskew.q.out
@@ -64,7 +64,7 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                    expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
@@ -82,7 +82,7 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                    expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/groupby9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby9.q.out b/ql/src/test/results/clientpositive/spark/groupby9.q.out
index 316f936..5cbb4e7 100644
--- a/ql/src/test/results/clientpositive/spark/groupby9.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby9.q.out
@@ -88,7 +88,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -108,7 +108,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), UDFToString(_col2) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), CAST( _col2 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -875,7 +875,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -895,7 +895,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col1) (type: int), _col0 (type: string), UDFToString(_col2) (type: string)
+                  expressions: UDFToInteger(_col1) (type: int), _col0 (type: string), CAST( _col2 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -1662,7 +1662,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -1682,7 +1682,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), UDFToString(_col2) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), CAST( _col2 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -2454,7 +2454,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -2475,7 +2475,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), UDFToString(_col2) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), CAST( _col2 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -3242,7 +3242,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -3262,7 +3262,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col1) (type: int), _col0 (type: string), UDFToString(_col2) (type: string)
+                  expressions: UDFToInteger(_col1) (type: int), _col0 (type: string), CAST( _col2 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/groupby_position.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby_position.q.out b/ql/src/test/results/clientpositive/spark/groupby_position.q.out
index 7bb5f18..971913a 100644
--- a/ql/src/test/results/clientpositive/spark/groupby_position.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby_position.q.out
@@ -86,7 +86,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -106,7 +106,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), UDFToString(_col2) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), CAST( _col2 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -279,7 +279,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+                  expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -299,7 +299,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col1) (type: int), _col0 (type: string), UDFToString(_col2) (type: string)
+                  expressions: UDFToInteger(_col1) (type: int), _col0 (type: string), CAST( _col2 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/groupby_sort_1_23.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby_sort_1_23.q.out b/ql/src/test/results/clientpositive/spark/groupby_sort_1_23.q.out
index 8737172..62cd640 100644
--- a/ql/src/test/results/clientpositive/spark/groupby_sort_1_23.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby_sort_1_23.q.out
@@ -2271,7 +2271,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: bigint)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: bigint)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
                   Select Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/groupby_sort_skew_1_23.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby_sort_skew_1_23.q.out b/ql/src/test/results/clientpositive/spark/groupby_sort_skew_1_23.q.out
index 5712030..016cb3b 100644
--- a/ql/src/test/results/clientpositive/spark/groupby_sort_skew_1_23.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby_sort_skew_1_23.q.out
@@ -1683,7 +1683,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(UDFToString(_col0)) (type: int), UDFToInteger(_col1) (type: int)
+                  expressions: UDFToInteger(CAST( _col0 AS STRING)) (type: int), UDFToInteger(_col1) (type: int)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -2371,7 +2371,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: bigint)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: bigint)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
                   Select Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/infer_bucket_sort_map_operators.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/infer_bucket_sort_map_operators.q.out b/ql/src/test/results/clientpositive/spark/infer_bucket_sort_map_operators.q.out
index 268dd10..1843e36 100644
--- a/ql/src/test/results/clientpositive/spark/infer_bucket_sort_map_operators.q.out
+++ b/ql/src/test/results/clientpositive/spark/infer_bucket_sort_map_operators.q.out
@@ -78,7 +78,7 @@ STAGE PLANS:
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
-                        expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                        expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                         outputColumnNames: _col0, _col1
                         Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                         File Output Operator
@@ -240,7 +240,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -501,7 +501,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator


[09/11] hive git commit: HIVE-20082: HiveDecimal to string conversion doesn't format the decimal correctly (Jason Dere, reviewed by Ashutosh Chauhan)

Posted by se...@apache.org.
HIVE-20082: HiveDecimal to string conversion doesn't format the decimal correctly (Jason Dere, reviewed by Ashutosh Chauhan)
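
For illustration only (not part of the original commit): a minimal sketch of the formatting difference this change targets, assuming HiveDecimal.toFormatString(scale) pads the textual form to the declared scale, as used in the VectorizationContext and CastDecimalToString hunks below. The scale value 2 is a hypothetical example.

    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class DecimalToStringSketch {
      public static void main(String[] args) {
        // A decimal(10,2) value whose textual form previously lost its trailing zero.
        HiveDecimal dec = HiveDecimal.create("1.5");

        // Old cast-to-string path: plain toString() drops trailing zeros.
        System.out.println(dec.toString());        // 1.5

        // New path: format to the column's declared scale
        // (assumed here to pad with trailing zeros).
        System.out.println(dec.toFormatString(2)); // 1.50
      }
    }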


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/26f4d8ea
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/26f4d8ea
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/26f4d8ea

Branch: refs/heads/master-txnstats
Commit: 26f4d8ea422eda39c2d24c2cce99f74686a40f48
Parents: 87b9f64
Author: Jason Dere <jd...@hortonworks.com>
Authored: Tue Jul 24 10:30:15 2018 -0700
Committer: Jason Dere <jd...@hortonworks.com>
Committed: Tue Jul 24 10:30:15 2018 -0700

----------------------------------------------------------------------
 .../predicate/TestAccumuloRangeGenerator.java   |   7 +-
 .../DTIColumnArithmeticDTIColumnNoConvert.txt   |   1 -
 .../DTIScalarArithmeticDTIColumnNoConvert.txt   |   1 -
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |   7 +-
 .../ql/exec/vector/VectorizationContext.java    |  26 +--
 .../vector/expressions/CastDecimalToString.java |   2 +-
 .../hive/ql/index/IndexPredicateAnalyzer.java   |   2 +
 .../calcite/translator/RexNodeConverter.java    |   4 +-
 .../hive/ql/optimizer/physical/Vectorizer.java  |   3 +-
 .../apache/hadoop/hive/ql/udf/UDFToString.java  | 181 -------------------
 .../hive/ql/udf/generic/GenericUDFToString.java |  79 ++++++++
 .../vector/expressions/TestVectorTypeCasts.java |   6 +-
 .../clientpositive/autoColumnStats_6.q.out      |   2 +-
 .../clientpositive/bucket_map_join_spark1.q.out |   4 +-
 .../clientpositive/bucket_map_join_spark2.q.out |   4 +-
 .../clientpositive/bucket_map_join_spark3.q.out |   4 +-
 .../results/clientpositive/bucketmapjoin5.q.out |   4 +-
 .../clientpositive/bucketmapjoin_negative.q.out |   2 +-
 .../bucketmapjoin_negative2.q.out               |   2 +-
 .../bucketsortoptimize_insert_3.q.out           |   2 +-
 .../clientpositive/char_pad_convert.q.out       |   4 +-
 .../column_pruner_multiple_children.q.out       |   2 +-
 .../test/results/clientpositive/decimal_2.q.out |   4 +-
 .../test/results/clientpositive/groupby12.q.out |   2 +-
 .../test/results/clientpositive/groupby5.q.out  |   2 +-
 .../clientpositive/groupby5_noskew.q.out        |   2 +-
 .../results/clientpositive/groupby7_map.q.out   |   4 +-
 .../groupby7_map_multi_single_reducer.q.out     |   4 +-
 .../clientpositive/groupby7_map_skew.q.out      |   4 +-
 .../clientpositive/groupby7_noskew.q.out        |   4 +-
 .../groupby7_noskew_multi_single_reducer.q.out  |   4 +-
 .../test/results/clientpositive/groupby8.q.out  |   8 +-
 .../results/clientpositive/groupby8_map.q.out   |   4 +-
 .../clientpositive/groupby8_map_skew.q.out      |   4 +-
 .../clientpositive/groupby8_noskew.q.out        |   4 +-
 .../test/results/clientpositive/groupby9.q.out  |  20 +-
 .../clientpositive/groupby_cube_multi_gby.q.out |   2 +-
 .../clientpositive/groupby_position.q.out       |   8 +-
 .../clientpositive/groupby_sort_1_23.q.out      |   2 +-
 .../clientpositive/groupby_sort_skew_1_23.q.out |   4 +-
 .../infer_bucket_sort_dyn_part.q.out            |   2 +-
 .../infer_bucket_sort_grouping_operators.q.out  |   6 +-
 .../infer_bucket_sort_map_operators.q.out       |   6 +-
 .../infer_bucket_sort_num_buckets.q.out         |   2 +-
 .../results/clientpositive/input_part10.q.out   |   2 +-
 .../clientpositive/llap/bucketmapjoin1.q.out    |   4 +-
 .../clientpositive/llap/bucketmapjoin2.q.out    |   6 +-
 .../clientpositive/llap/bucketmapjoin3.q.out    |   4 +-
 .../clientpositive/llap/bucketmapjoin4.q.out    |   4 +-
 .../llap/default_constraint.q.out               |   4 +-
 .../llap/dynamic_partition_pruning.q.out        |  16 +-
 .../llap/insert_into_default_keyword.q.out      |  20 +-
 .../llap/materialized_view_rewrite_6.q.out      |   4 +-
 .../llap/multi_insert_lateral_view.q.out        |  32 ++--
 .../clientpositive/llap/orc_merge1.q.out        |   6 +-
 .../clientpositive/llap/orc_merge10.q.out       |   6 +-
 .../clientpositive/llap/orc_merge2.q.out        |   2 +-
 .../clientpositive/llap/orc_merge_diff_fs.q.out |   6 +-
 .../clientpositive/llap/rcfile_merge2.q.out     |   2 +-
 .../llap/schema_evol_text_nonvec_part.q.out     |   4 +-
 ...hema_evol_text_nonvec_part_all_complex.q.out |  16 +-
 ...l_text_nonvec_part_all_complex_llap_io.q.out |  16 +-
 .../schema_evol_text_nonvec_part_llap_io.q.out  |   4 +-
 .../llap/schema_evol_text_vec_part.q.out        |   4 +-
 .../schema_evol_text_vec_part_all_complex.q.out |  16 +-
 .../llap/schema_evol_text_vecrow_part.q.out     |   4 +-
 ...hema_evol_text_vecrow_part_all_complex.q.out |  16 +-
 .../results/clientpositive/llap/stats11.q.out   |   4 +-
 .../clientpositive/llap/subquery_multi.q.out    |   6 +-
 .../llap/tez_union_multiinsert.q.out            |  30 +--
 .../results/clientpositive/llap/union6.q.out    |   2 +-
 .../clientpositive/llap/unionDistinct_1.q.out   |  18 +-
 .../clientpositive/llap/unionDistinct_3.q.out   |  10 +-
 .../llap/vector_case_when_1.q.out               |  10 +-
 .../llap/vector_char_mapjoin1.q.out             |   6 +-
 .../clientpositive/llap/vector_decimal_1.q.out  |   2 +-
 .../clientpositive/llap/vector_decimal_2.q.out  |   8 +-
 .../llap/vector_decimal_expressions.q.out       |  28 +--
 .../llap/vector_string_concat.q.out             |   2 +-
 .../clientpositive/llap/vector_udf1.q.out       |   6 +-
 .../llap/vector_varchar_mapjoin1.q.out          |   6 +-
 .../clientpositive/llap/vectorized_casts.q.out  |   2 +-
 .../vectorized_dynamic_partition_pruning.q.out  |  16 +-
 .../clientpositive/multi_insert_mixed.q.out     |   4 +-
 .../results/clientpositive/orc_merge1.q.out     |   6 +-
 .../results/clientpositive/orc_merge10.q.out    |   6 +-
 .../results/clientpositive/orc_merge2.q.out     |   2 +-
 .../clientpositive/orc_merge_diff_fs.q.out      |   6 +-
 .../clientpositive/perf/spark/query36.q.out     |   6 +-
 .../clientpositive/perf/spark/query70.q.out     |   6 +-
 .../clientpositive/perf/spark/query86.q.out     |   6 +-
 .../clientpositive/perf/tez/query36.q.out       |   4 +-
 .../clientpositive/perf/tez/query70.q.out       |   4 +-
 .../clientpositive/perf/tez/query86.q.out       |   4 +-
 .../results/clientpositive/show_functions.q.out |   1 -
 .../results/clientpositive/smb_mapjoin_20.q.out |  12 +-
 .../spark/bucket_map_join_spark1.q.out          |   4 +-
 .../spark/bucket_map_join_spark2.q.out          |   4 +-
 .../spark/bucket_map_join_spark3.q.out          |   4 +-
 .../clientpositive/spark/bucketmapjoin1.q.out   |   4 +-
 .../clientpositive/spark/bucketmapjoin2.q.out   |   6 +-
 .../clientpositive/spark/bucketmapjoin3.q.out   |   4 +-
 .../clientpositive/spark/bucketmapjoin4.q.out   |   4 +-
 .../clientpositive/spark/bucketmapjoin5.q.out   |   4 +-
 .../spark/bucketmapjoin_negative.q.out          |   2 +-
 .../spark/bucketmapjoin_negative2.q.out         |   2 +-
 .../spark/dynamic_rdd_cache.q.out               |   8 +-
 .../results/clientpositive/spark/groupby5.q.out |   2 +-
 .../clientpositive/spark/groupby5_noskew.q.out  |   2 +-
 .../clientpositive/spark/groupby7_map.q.out     |   4 +-
 .../groupby7_map_multi_single_reducer.q.out     |   4 +-
 .../spark/groupby7_map_skew.q.out               |   4 +-
 .../clientpositive/spark/groupby7_noskew.q.out  |   4 +-
 .../groupby7_noskew_multi_single_reducer.q.out  |   4 +-
 .../results/clientpositive/spark/groupby8.q.out |   8 +-
 .../clientpositive/spark/groupby8_map.q.out     |   4 +-
 .../spark/groupby8_map_skew.q.out               |   4 +-
 .../clientpositive/spark/groupby8_noskew.q.out  |   4 +-
 .../results/clientpositive/spark/groupby9.q.out |  20 +-
 .../clientpositive/spark/groupby_position.q.out |   8 +-
 .../spark/groupby_sort_1_23.q.out               |   2 +-
 .../spark/groupby_sort_skew_1_23.q.out          |   4 +-
 .../spark/infer_bucket_sort_map_operators.q.out |   6 +-
 .../spark/multi_insert_lateral_view.q.out       |  32 ++--
 .../spark/multi_insert_mixed.q.out              |   4 +-
 .../clientpositive/spark/smb_mapjoin_20.q.out   |  12 +-
 .../spark/spark_dynamic_partition_pruning.q.out |  44 ++---
 ...k_vectorized_dynamic_partition_pruning.q.out |  44 ++---
 .../results/clientpositive/spark/stats1.q.out   |   2 +-
 .../clientpositive/spark/subquery_multi.q.out   |   6 +-
 .../results/clientpositive/spark/union17.q.out  |   8 +-
 .../results/clientpositive/spark/union18.q.out  |   2 +-
 .../results/clientpositive/spark/union19.q.out  |   4 +-
 .../results/clientpositive/spark/union20.q.out  |   4 +-
 .../results/clientpositive/spark/union32.q.out  |   4 +-
 .../results/clientpositive/spark/union33.q.out  |   4 +-
 .../results/clientpositive/spark/union6.q.out   |   2 +-
 .../clientpositive/spark/union_remove_19.q.out  |   4 +-
 .../spark/vector_string_concat.q.out            |   2 +-
 ql/src/test/results/clientpositive/stats1.q.out |   2 +-
 .../results/clientpositive/tablevalues.q.out    |   2 +-
 ql/src/test/results/clientpositive/udf3.q.out   |   2 +-
 .../results/clientpositive/udf_string.q.out     |   9 +-
 .../test/results/clientpositive/union17.q.out   |   6 +-
 .../test/results/clientpositive/union18.q.out   |   2 +-
 .../test/results/clientpositive/union19.q.out   |   4 +-
 .../test/results/clientpositive/union20.q.out   |   4 +-
 .../test/results/clientpositive/union32.q.out   |   4 +-
 .../test/results/clientpositive/union33.q.out   |   4 +-
 ql/src/test/results/clientpositive/union6.q.out |   2 +-
 .../clientpositive/union_remove_19.q.out        |   4 +-
 .../clientpositive/vector_case_when_1.q.out     |  10 +-
 .../clientpositive/vector_char_mapjoin1.q.out   |   4 +-
 .../clientpositive/vector_decimal_1.q.out       |   2 +-
 .../vector_decimal_expressions.q.out            |  28 +--
 .../clientpositive/vector_string_concat.q.out   |   2 +-
 .../vector_varchar_mapjoin1.q.out               |   4 +-
 .../clientpositive/vectorized_casts.q.out       |   2 +-
 .../PrimitiveObjectInspectorConverter.java      |   3 +-
 .../PrimitiveObjectInspectorUtils.java          |   3 +-
 .../TestObjectInspectorConverters.java          |  46 ++++-
 .../TestPrimitiveObjectInspectorUtils.java      |  14 ++
 162 files changed, 648 insertions(+), 695 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloRangeGenerator.java
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloRangeGenerator.java b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloRangeGenerator.java
index 9df2aad..4975fa0 100644
--- a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloRangeGenerator.java
+++ b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloRangeGenerator.java
@@ -38,7 +38,7 @@ import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.udf.UDFToString;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToString;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
@@ -388,12 +388,11 @@ public class TestAccumuloRangeGenerator {
     ExprNodeGenericFuncDesc addition = new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, plus, Arrays.asList(fourty, fifty));
 
     // cast(.... as string)
-    UDFToString stringCast = new UDFToString();
-    GenericUDFBridge stringCastBridge = new GenericUDFBridge("cast", false, stringCast.getClass().getName());
+    GenericUDFToString stringCast = new GenericUDFToString();
 
     // cast (40 + 50 as string)
     ExprNodeGenericFuncDesc cast = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
-        stringCastBridge, "cast", Collections.<ExprNodeDesc> singletonList(addition));
+        stringCast, "cast", Collections.<ExprNodeDesc> singletonList(addition));
 
     ExprNodeDesc key = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "key", null,
         false);

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIColumnNoConvert.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIColumnNoConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIColumnNoConvert.txt
index bfccf2a..5307d33 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIColumnNoConvert.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIColumnNoConvert.txt
@@ -18,7 +18,6 @@
  
 package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
 
-import org.apache.hadoop.hive.ql.udf.UDFToString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
 import org.apache.hadoop.io.LongWritable;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarArithmeticDTIColumnNoConvert.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarArithmeticDTIColumnNoConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarArithmeticDTIColumnNoConvert.txt
index 514f3f9..f8cfa57 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarArithmeticDTIColumnNoConvert.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarArithmeticDTIColumnNoConvert.txt
@@ -18,7 +18,6 @@
  
 package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
 
-import org.apache.hadoop.hive.ql.udf.UDFToString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
 import org.apache.hadoop.io.LongWritable;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 3d5506f..4459184 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -109,7 +109,6 @@ import org.apache.hadoop.hive.ql.udf.UDFToFloat;
 import org.apache.hadoop.hive.ql.udf.UDFToInteger;
 import org.apache.hadoop.hive.ql.udf.UDFToLong;
 import org.apache.hadoop.hive.ql.udf.UDFToShort;
-import org.apache.hadoop.hive.ql.udf.UDFToString;
 import org.apache.hadoop.hive.ql.udf.UDFType;
 import org.apache.hadoop.hive.ql.udf.UDFUUID;
 import org.apache.hadoop.hive.ql.udf.UDFUnbase64;
@@ -402,12 +401,11 @@ public final class FunctionRegistry {
     system.registerUDF(serdeConstants.BIGINT_TYPE_NAME, UDFToLong.class, false, UDFToLong.class.getSimpleName());
     system.registerUDF(serdeConstants.FLOAT_TYPE_NAME, UDFToFloat.class, false, UDFToFloat.class.getSimpleName());
     system.registerUDF(serdeConstants.DOUBLE_TYPE_NAME, UDFToDouble.class, false, UDFToDouble.class.getSimpleName());
-    system.registerUDF(serdeConstants.STRING_TYPE_NAME, UDFToString.class, false, UDFToString.class.getSimpleName());
     // following mapping is to enable UDFName to UDF while generating expression for default value (in operator tree)
     //  e.g. cast(4 as string) is serialized as UDFToString(4) into metastore, to allow us to generate appropriate UDF for
     //  UDFToString we need the following mappings
     // Rest of the types e.g. DATE, CHAR, VARCHAR etc are already registered
-    system.registerUDF(UDFToString.class.getSimpleName(), UDFToString.class, false, UDFToString.class.getSimpleName());
+    // TODO: According to vgarg, these function mappings are no longer necessary as the default value logic has changed.
     system.registerUDF(UDFToBoolean.class.getSimpleName(), UDFToBoolean.class, false, UDFToBoolean.class.getSimpleName());
     system.registerUDF(UDFToDouble.class.getSimpleName(), UDFToDouble.class, false, UDFToDouble.class.getSimpleName());
     system.registerUDF(UDFToFloat.class.getSimpleName(), UDFToFloat.class, false, UDFToFloat.class.getSimpleName());
@@ -416,6 +414,7 @@ public final class FunctionRegistry {
     system.registerUDF(UDFToShort.class.getSimpleName(), UDFToShort.class, false, UDFToShort.class.getSimpleName());
     system.registerUDF(UDFToByte.class.getSimpleName(), UDFToByte.class, false, UDFToByte.class.getSimpleName());
 
+    system.registerGenericUDF(serdeConstants.STRING_TYPE_NAME, GenericUDFToString.class);
     system.registerGenericUDF(serdeConstants.DATE_TYPE_NAME, GenericUDFToDate.class);
     system.registerGenericUDF(serdeConstants.TIMESTAMP_TYPE_NAME, GenericUDFTimestamp.class);
     system.registerGenericUDF(serdeConstants.TIMESTAMPLOCALTZ_TYPE_NAME, GenericUDFToTimestampLocalTZ.class);
@@ -1626,7 +1625,7 @@ public final class FunctionRegistry {
     return udfClass == UDFToBoolean.class || udfClass == UDFToByte.class ||
         udfClass == UDFToDouble.class || udfClass == UDFToFloat.class ||
         udfClass == UDFToInteger.class || udfClass == UDFToLong.class ||
-        udfClass == UDFToShort.class || udfClass == UDFToString.class ||
+        udfClass == UDFToShort.class || udfClass == GenericUDFToString.class ||
         udfClass == GenericUDFToVarchar.class || udfClass == GenericUDFToChar.class ||
         udfClass == GenericUDFTimestamp.class || udfClass == GenericUDFToBinary.class ||
         udfClass == GenericUDFToDate.class || udfClass == GenericUDFToDecimal.class ||

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
index 5631347..20cc894 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
@@ -440,6 +440,7 @@ public class VectorizationContext {
   // Set of UDF classes for type casting data types in row-mode.
   private static Set<Class<?>> castExpressionUdfs = new HashSet<Class<?>>();
   static {
+    castExpressionUdfs.add(GenericUDFToString.class);
     castExpressionUdfs.add(GenericUDFToDecimal.class);
     castExpressionUdfs.add(GenericUDFToBinary.class);
     castExpressionUdfs.add(GenericUDFToDate.class);
@@ -454,7 +455,6 @@ public class VectorizationContext {
     castExpressionUdfs.add(UDFToBoolean.class);
     castExpressionUdfs.add(UDFToDouble.class);
     castExpressionUdfs.add(UDFToFloat.class);
-    castExpressionUdfs.add(UDFToString.class);
     castExpressionUdfs.add(UDFToInteger.class);
     castExpressionUdfs.add(UDFToLong.class);
     castExpressionUdfs.add(UDFToShort.class);
@@ -1139,7 +1139,7 @@ public class VectorizationContext {
         udfClass = new UDFToDouble();
         break;
       case STRING:
-        udfClass = new UDFToString();
+        genericUdf = new GenericUDFToString();
         break;
       case CHAR:
         genericUdf = new GenericUDFToChar();
@@ -1207,11 +1207,7 @@ public class VectorizationContext {
           || udfClass.equals(UDFConv.class)
           || udfClass.equals(UDFFromUnixTime.class) && isIntFamily(arg0Type(expr))
           || isCastToIntFamily(udfClass) && isStringFamily(arg0Type(expr))
-          || isCastToFloatFamily(udfClass) && isStringFamily(arg0Type(expr))
-          || udfClass.equals(UDFToString.class) &&
-               (arg0Type(expr).equals("timestamp")
-                   || arg0Type(expr).equals("double")
-                   || arg0Type(expr).equals("float"))) {
+          || isCastToFloatFamily(udfClass) && isStringFamily(arg0Type(expr))) {
         return true;
       }
     } else if ((gudf instanceof GenericUDFTimestamp && isStringFamily(arg0Type(expr)))
@@ -1229,16 +1225,13 @@ public class VectorizationContext {
             || gudf instanceof GenericUDFCase
             || gudf instanceof GenericUDFWhen) {
       return true;
-    } else if (gudf instanceof GenericUDFToChar &&
+    } else if ((gudf instanceof GenericUDFToString
+                   || gudf instanceof GenericUDFToChar
+                   || gudf instanceof GenericUDFToVarchar) &&
                (arg0Type(expr).equals("timestamp")
                    || arg0Type(expr).equals("double")
                    || arg0Type(expr).equals("float"))) {
       return true;
-    } else if (gudf instanceof GenericUDFToVarchar &&
-            (arg0Type(expr).equals("timestamp")
-                || arg0Type(expr).equals("double")
-                || arg0Type(expr).equals("float"))) {
-      return true;
     } else if (gudf instanceof GenericUDFBetween && (mode == VectorExpressionDescriptor.Mode.PROJECTION)) {
       // between has 4 args here, but can be vectorized like this
       return true;
@@ -2061,6 +2054,8 @@ public class VectorizationContext {
     } else if (udf instanceof GenericUDFBridge) {
       ve = getGenericUDFBridgeVectorExpression((GenericUDFBridge) udf, childExpr, mode,
           returnType);
+    } else if (udf instanceof GenericUDFToString) {
+      ve = getCastToString(childExpr, returnType);
     } else if (udf instanceof GenericUDFToDecimal) {
       ve = getCastToDecimal(childExpr, returnType);
     } else if (udf instanceof GenericUDFToChar) {
@@ -2508,8 +2503,6 @@ public class VectorizationContext {
       ve = getCastToBoolean(childExpr);
     } else if (isCastToFloatFamily(cl)) {
       ve = getCastToDoubleExpression(cl, childExpr, returnType);
-    } else if (cl.equals(UDFToString.class)) {
-      ve = getCastToString(childExpr, returnType);
     }
     if (ve == null && childExpr instanceof ExprNodeGenericFuncDesc) {
       ve = getCustomUDFExpression((ExprNodeGenericFuncDesc) childExpr, mode);
@@ -2585,7 +2578,8 @@ public class VectorizationContext {
       return ((Number) scalar).toString();
     case DECIMAL:
       HiveDecimal decimalVal = (HiveDecimal) scalar;
-      return decimalVal.toString();
+      DecimalTypeInfo decType = (DecimalTypeInfo) type;
+      return decimalVal.toFormatString(decType.getScale());
     default:
       throw new HiveException("Unsupported type "+typename+" for cast to String");
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java
index a3a0e9d..77bfe13 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java
@@ -60,7 +60,7 @@ public class CastDecimalToString extends DecimalToStringUnaryUDF {
   @Override
   protected void func(BytesColumnVector outV, DecimalColumnVector inV, int i) {
     HiveDecimalWritable decWritable = inV.vector[i];
-    final int byteIndex = decWritable.toBytes(scratchBuffer);
+    final int byteIndex = decWritable.toFormatBytes(inV.scale, scratchBuffer);
     assign(outV, i, scratchBuffer, byteIndex, HiveDecimal.SCRATCH_BUFFER_LEN_TO_BYTES - byteIndex);
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java
index 6a3f3b4..da31f4d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToBinary;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToChar;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDate;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDecimal;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToString;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUtcTimestamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToVarchar;
@@ -194,6 +195,7 @@ public class IndexPredicateAnalyzer {
     GenericUDF udf = funcDesc.getGenericUDF();
     // check if its a simple cast expression.
     if ((udf instanceof GenericUDFBridge || udf instanceof GenericUDFToBinary
+        || udf instanceof GenericUDFToString
         || udf instanceof GenericUDFToChar || udf instanceof GenericUDFToVarchar
         || udf instanceof GenericUDFToDecimal || udf instanceof GenericUDFToDate
         || udf instanceof GenericUDFToUnixTimeStamp || udf instanceof GenericUDFToUtcTimestamp)

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
index 0a9ab2b..f544f58 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
@@ -84,6 +84,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToBinary;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToChar;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDate;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDecimal;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToString;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToTimestampLocalTZ;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToVarchar;
@@ -376,7 +377,7 @@ public class RexNodeConverter {
           if (udfClassName.equals("UDFToBoolean") || udfClassName.equals("UDFToByte")
               || udfClassName.equals("UDFToDouble") || udfClassName.equals("UDFToInteger")
               || udfClassName.equals("UDFToLong") || udfClassName.equals("UDFToShort")
-              || udfClassName.equals("UDFToFloat") || udfClassName.equals("UDFToString"))
+              || udfClassName.equals("UDFToFloat"))
             castExpr = true;
         }
       }
@@ -392,6 +393,7 @@ public class RexNodeConverter {
     if (childRexNodeLst != null && childRexNodeLst.size() == 1) {
       GenericUDF udf = func.getGenericUDF();
       if ((udf instanceof GenericUDFToChar) || (udf instanceof GenericUDFToVarchar)
+          || (udf instanceof GenericUDFToString)
           || (udf instanceof GenericUDFToDecimal) || (udf instanceof GenericUDFToDate)
           || (udf instanceof GenericUDFTimestamp) || (udf instanceof GenericUDFToTimestampLocalTZ)
           || (udf instanceof GenericUDFToBinary) || castExprUsingUDFBridge(udf)) {

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
index 40bd075..eb5b1a8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
@@ -206,7 +206,6 @@ import org.apache.hadoop.hive.ql.udf.UDFToFloat;
 import org.apache.hadoop.hive.ql.udf.UDFToInteger;
 import org.apache.hadoop.hive.ql.udf.UDFToLong;
 import org.apache.hadoop.hive.ql.udf.UDFToShort;
-import org.apache.hadoop.hive.ql.udf.UDFToString;
 import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear;
 import org.apache.hadoop.hive.ql.udf.UDFYear;
 import org.apache.hadoop.hive.ql.udf.generic.*;
@@ -488,7 +487,7 @@ public class Vectorizer implements PhysicalPlanResolver {
     supportedGenericUDFs.add(UDFToBoolean.class);
     supportedGenericUDFs.add(UDFToFloat.class);
     supportedGenericUDFs.add(UDFToDouble.class);
-    supportedGenericUDFs.add(UDFToString.class);
+    supportedGenericUDFs.add(GenericUDFToString.class);
     supportedGenericUDFs.add(GenericUDFTimestamp.class);
     supportedGenericUDFs.add(GenericUDFToDecimal.class);
     supportedGenericUDFs.add(GenericUDFToDate.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
deleted file mode 100755
index a16d429..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
+++ /dev/null
@@ -1,181 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.udf;
-
-import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.hive.serde2.ByteStream;
-import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
-import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
-import org.apache.hadoop.hive.serde2.lazy.LazyLong;
-import org.apache.hadoop.io.BooleanWritable;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.FloatWritable;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.Text;
-
-/**
- * UDFToString.
- *
- */
-public class UDFToString extends UDF {
-  private final Text t = new Text();
-  private final ByteStream.Output out = new ByteStream.Output();
-
-  public UDFToString() {
-  }
-
-  public Text evaluate(NullWritable i) {
-    return null;
-  }
-
-  private final byte[] trueBytes = {'T', 'R', 'U', 'E'};
-  private final byte[] falseBytes = {'F', 'A', 'L', 'S', 'E'};
-
-  public Text evaluate(BooleanWritable i) {
-    if (i == null) {
-      return null;
-    } else {
-      t.clear();
-      t.set(i.get() ? trueBytes : falseBytes);
-      return t;
-    }
-  }
-
-  public Text evaluate(ByteWritable i) {
-    if (i == null) {
-      return null;
-    } else {
-      out.reset();
-      LazyInteger.writeUTF8NoException(out, i.get());
-      t.set(out.getData(), 0, out.getLength());
-      return t;
-    }
-  }
-
-  public Text evaluate(ShortWritable i) {
-    if (i == null) {
-      return null;
-    } else {
-      out.reset();
-      LazyInteger.writeUTF8NoException(out, i.get());
-      t.set(out.getData(), 0, out.getLength());
-      return t;
-    }
-  }
-
-  public Text evaluate(IntWritable i) {
-    if (i == null) {
-      return null;
-    } else {
-      out.reset();
-      LazyInteger.writeUTF8NoException(out, i.get());
-      t.set(out.getData(), 0, out.getLength());
-      return t;
-    }
-  }
-
-  public Text evaluate(LongWritable i) {
-    if (i == null) {
-      return null;
-    } else {
-      out.reset();
-      LazyLong.writeUTF8NoException(out, i.get());
-      t.set(out.getData(), 0, out.getLength());
-      return t;
-    }
-  }
-
-  public Text evaluate(FloatWritable i) {
-    if (i == null) {
-      return null;
-    } else {
-      t.set(i.toString());
-      return t;
-    }
-  }
-
-  public Text evaluate(DoubleWritable i) {
-    if (i == null) {
-      return null;
-    } else {
-      t.set(i.toString());
-      return t;
-    }
-  }
-
-  public Text evaluate(Text i) {
-      if (i == null) {
-          return null;
-      }
-      i.set(i.toString());
-      return i;
-  }
-
-  public Text evaluate(DateWritableV2 d) {
-    if (d == null) {
-      return null;
-    } else {
-      t.set(d.toString());
-      return t;
-    }
-  }
-
-  public Text evaluate(TimestampWritableV2 i) {
-    if (i == null) {
-      return null;
-    } else {
-      t.set(i.toString());
-      return t;
-    }
-  }
-
-  public Text evaluate(TimestampLocalTZWritable i) {
-    if (i == null) {
-      return null;
-    } else {
-      t.set(i.toString());
-      return t;
-    }
-  }
-
-  public Text evaluate(HiveDecimalWritable i) {
-    if (i == null) {
-      return null;
-    } else {
-      t.set(i.toString());
-      return t;
-    }
-  }
-
-  public Text evaluate (BytesWritable bw) {
-    if (null == bw) {
-      return null;
-    }
-    t.set(bw.getBytes(),0,bw.getLength());
-    return t;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToString.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToString.java
new file mode 100644
index 0000000..d576441
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToString.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TextConverter;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "string",
+value = "CAST(<value> as STRING) - Converts the argument to a string value.",
+extended = "Example:\n "
++ "  > SELECT CAST(1234 AS string) FROM src LIMIT 1;\n"
++ "  '1234'")
+public class GenericUDFToString extends GenericUDF {
+  private static final Logger LOG = LoggerFactory.getLogger(GenericUDFToString.class.getName());
+
+  private transient PrimitiveObjectInspector argumentOI;
+  private transient TextConverter converter;
+
+  public GenericUDFToString() {
+  }
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    if (arguments.length != 1) {
+      throw new UDFArgumentException("STRING cast requires a value argument");
+    }
+    try {
+      argumentOI = (PrimitiveObjectInspector) arguments[0];
+    } catch (ClassCastException e) {
+      throw new UDFArgumentException(
+          "The function STRING takes only primitive types");
+    }
+
+    converter = new TextConverter(argumentOI);
+    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+      Object o0 = arguments[0].get();
+      if (o0 == null) {
+        return null;
+      }
+
+      return converter.convert(o0);
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    assert (children.length == 1);
+    StringBuilder sb = new StringBuilder();
+    sb.append("CAST( ");
+    sb.append(children[0]);
+    sb.append(" AS STRING)");
+    return sb.toString();
+  }
+}
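
For context, here is a minimal sketch of how the new GenericUDFToString can be exercised on its own through the standard GenericUDF initialize/evaluate pattern. It is illustrative only and not part of this patch; the wrapper class name is made up, while the Hive classes and factory fields referenced are the ones used in the file above.

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToString;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class GenericUDFToStringSketch {  // hypothetical class, for illustration only
  public static void main(String[] args) throws HiveException {
    GenericUDFToString udf = new GenericUDFToString();
    // initialize() sets up the TextConverter for the single primitive argument.
    ObjectInspector[] argOIs = { PrimitiveObjectInspectorFactory.writableIntObjectInspector };
    udf.initialize(argOIs);
    // evaluate() delegates to the converter, so CAST(1234 AS STRING) yields "1234".
    DeferredObject[] input = { new DeferredJavaObject(new IntWritable(1234)) };
    Text result = (Text) udf.evaluate(input);
    System.out.println(result);  // prints: 1234
  }
}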

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
index 4ed087e..e9be8c1 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
@@ -435,18 +435,18 @@ public class TestVectorTypeCasts {
     BytesColumnVector r = (BytesColumnVector) b.cols[1];
 
     // As of HIVE-8745, these decimal values should be trimmed of trailing zeros.
-    byte[] v = toBytes("1.1");
+    byte[] v = toBytes("1.10");
     assertTrue(((Integer) v.length).toString() + " " + r.length[0], v.length == r.length[0]);
     Assert.assertEquals(0,
         StringExpr.compare(v, 0, v.length,
             r.vector[0], r.start[0], r.length[0]));
 
-    v = toBytes("-2.2");
+    v = toBytes("-2.20");
     Assert.assertEquals(0,
         StringExpr.compare(v, 0, v.length,
             r.vector[1], r.start[1], r.length[1]));
 
-    v = toBytes("9999999999999999");
+    v = toBytes("9999999999999999.00");
     Assert.assertEquals(0,
         StringExpr.compare(v, 0, v.length,
             r.vector[2], r.start[2], r.length[2]));

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/autoColumnStats_6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/autoColumnStats_6.q.out b/ql/src/test/results/clientpositive/autoColumnStats_6.q.out
index 70788fd..98b5bc0 100644
--- a/ql/src/test/results/clientpositive/autoColumnStats_6.q.out
+++ b/ql/src/test/results/clientpositive/autoColumnStats_6.q.out
@@ -54,7 +54,7 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                     name: default.orcfile_merge2a
               Select Operator
-                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string), UDFToString(_col3) (type: string)
+                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string), CAST( _col3 AS STRING) (type: string)
                 outputColumnNames: key, value, one, two, three
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/bucket_map_join_spark1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket_map_join_spark1.q.out b/ql/src/test/results/clientpositive/bucket_map_join_spark1.q.out
index 7764706..6da9934 100644
--- a/ql/src/test/results/clientpositive/bucket_map_join_spark1.q.out
+++ b/ql/src/test/results/clientpositive/bucket_map_join_spark1.q.out
@@ -233,7 +233,7 @@ STAGE PLANS:
                   Position of Big Table: 1
                   Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                   Select Operator
-                    expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                    expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                     outputColumnNames: _col0, _col1, _col2
                     Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                     File Output Operator
@@ -684,7 +684,7 @@ STAGE PLANS:
                   Position of Big Table: 1
                   Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                   Select Operator
-                    expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                    expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                     outputColumnNames: _col0, _col1, _col2
                     Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                     File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/bucket_map_join_spark2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket_map_join_spark2.q.out b/ql/src/test/results/clientpositive/bucket_map_join_spark2.q.out
index 090a0db..b086c0e 100644
--- a/ql/src/test/results/clientpositive/bucket_map_join_spark2.q.out
+++ b/ql/src/test/results/clientpositive/bucket_map_join_spark2.q.out
@@ -217,7 +217,7 @@ STAGE PLANS:
                   Position of Big Table: 0
                   Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                   Select Operator
-                    expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                    expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                     outputColumnNames: _col0, _col1, _col2
                     Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                     File Output Operator
@@ -668,7 +668,7 @@ STAGE PLANS:
                   Position of Big Table: 0
                   Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                   Select Operator
-                    expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                    expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                     outputColumnNames: _col0, _col1, _col2
                     Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                     File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/bucket_map_join_spark3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket_map_join_spark3.q.out b/ql/src/test/results/clientpositive/bucket_map_join_spark3.q.out
index 692b8e7..bd91b44 100644
--- a/ql/src/test/results/clientpositive/bucket_map_join_spark3.q.out
+++ b/ql/src/test/results/clientpositive/bucket_map_join_spark3.q.out
@@ -217,7 +217,7 @@ STAGE PLANS:
                   Position of Big Table: 1
                   Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                   Select Operator
-                    expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                    expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                     outputColumnNames: _col0, _col1, _col2
                     Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                     File Output Operator
@@ -668,7 +668,7 @@ STAGE PLANS:
                   Position of Big Table: 1
                   Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                   Select Operator
-                    expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                    expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                     outputColumnNames: _col0, _col1, _col2
                     Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                     File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/bucketmapjoin5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin5.q.out b/ql/src/test/results/clientpositive/bucketmapjoin5.q.out
index 79da333..a4d5e4e 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin5.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin5.q.out
@@ -239,7 +239,7 @@ STAGE PLANS:
                 Statistics: Num rows: 327 Data size: 127864 Basic stats: PARTIAL Column stats: NONE
                 BucketMapJoin: true
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col6 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col6 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 327 Data size: 127864 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator
@@ -843,7 +843,7 @@ STAGE PLANS:
                 Statistics: Num rows: 171 Data size: 67364 Basic stats: PARTIAL Column stats: NONE
                 BucketMapJoin: true
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col6 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col6 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 171 Data size: 67364 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out b/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out
index 1a53c37..10c91cd 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out
@@ -180,7 +180,7 @@ STAGE PLANS:
                 Position of Big Table: 0
                 Statistics: Num rows: 154 Data size: 46200 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col6 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col6 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 154 Data size: 46200 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out b/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out
index 426b051..3009c47 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out
@@ -244,7 +244,7 @@ STAGE PLANS:
                 Statistics: Num rows: 171 Data size: 67364 Basic stats: PARTIAL Column stats: NONE
                 BucketMapJoin: true
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col6 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col6 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 171 Data size: 67364 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/bucketsortoptimize_insert_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucketsortoptimize_insert_3.q.out b/ql/src/test/results/clientpositive/bucketsortoptimize_insert_3.q.out
index b798fa2..6db8bfb 100644
--- a/ql/src/test/results/clientpositive/bucketsortoptimize_insert_3.q.out
+++ b/ql/src/test/results/clientpositive/bucketsortoptimize_insert_3.q.out
@@ -179,7 +179,7 @@ STAGE PLANS:
                 value expressions: _col0 (type: int), _col1 (type: string)
       Reduce Operator Tree:
         Select Operator
-          expressions: UDFToString(VALUE._col0) (type: string), UDFToInteger(VALUE._col1) (type: int)
+          expressions: CAST( VALUE._col0 AS STRING) (type: string), UDFToInteger(VALUE._col1) (type: int)
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
           File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/char_pad_convert.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/char_pad_convert.q.out b/ql/src/test/results/clientpositive/char_pad_convert.q.out
index b63dc25..df3348f 100644
--- a/ql/src/test/results/clientpositive/char_pad_convert.q.out
+++ b/ql/src/test/results/clientpositive/char_pad_convert.q.out
@@ -134,7 +134,7 @@ POSTHOOK: query: select lpad(f, 4, ' '),
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over1k_n6
 #### A masked pattern was here ####
-74.7	42	zzzzzTRUE	20	ddd45.4	yard du
+74.7	42	zzzzzTRUE	20	dd45.40	yard du
 26.4	37	zzzzzTRUE	20	dd29.62	history
 96.9	18	zzzzFALSE	20	dd27.32	history
 13.0	34	zzzzFALSE	20	dd23.91	topolog
@@ -180,7 +180,7 @@ POSTHOOK: query: select rpad(f, 4, ' '),
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over1k_n6
 #### A masked pattern was here ####
-74.7	42	TRUEzzzzz	20	45.4ddd	yard du
+74.7	42	TRUEzzzzz	20	45.40dd	yard du
 26.4	37	TRUEzzzzz	20	29.62dd	history
 96.9	18	FALSEzzzz	20	27.32dd	history
 13.0	34	FALSEzzzz	20	23.91dd	topolog

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/column_pruner_multiple_children.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/column_pruner_multiple_children.q.out b/ql/src/test/results/clientpositive/column_pruner_multiple_children.q.out
index 4b0aaab..cd0c969 100644
--- a/ql/src/test/results/clientpositive/column_pruner_multiple_children.q.out
+++ b/ql/src/test/results/clientpositive/column_pruner_multiple_children.q.out
@@ -56,7 +56,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/decimal_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/decimal_2.q.out b/ql/src/test/results/clientpositive/decimal_2.q.out
index 22f4c33..aeebf9e 100644
--- a/ql/src/test/results/clientpositive/decimal_2.q.out
+++ b/ql/src/test/results/clientpositive/decimal_2.q.out
@@ -100,7 +100,7 @@ POSTHOOK: query: select cast(t as string) from decimal_2_n1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_2_n1
 #### A masked pattern was here ####
-17.29
+17.290000000
 PREHOOK: query: insert overwrite table decimal_2_n1
   select cast('3404045.5044003' as decimal(18,9)) from src tablesample (1 rows)
 PREHOOK: type: QUERY
@@ -183,7 +183,7 @@ POSTHOOK: query: select cast(t as string) from decimal_2_n1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_2_n1
 #### A masked pattern was here ####
-3404045.5044003
+3404045.504400300
 PREHOOK: query: select cast(3.14 as decimal(4,2)) from decimal_2_n1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_2_n1
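
The decimal_2.q.out change above shows the user-visible effect of routing CAST(decimal AS STRING) through the new converter: a decimal(18,9) value is now rendered at its declared scale rather than with trailing zeros trimmed. A rough JDK-only analogy of that padding (plain java.math.BigDecimal, not Hive code; the class name is made up for illustration):

import java.math.BigDecimal;

public class DecimalScalePaddingSketch {  // hypothetical, for illustration only
  public static void main(String[] args) {
    // Padding to the declared scale of 9 mirrors the updated expected output.
    System.out.println(new BigDecimal("17.29").setScale(9));             // 17.290000000
    System.out.println(new BigDecimal("3404045.5044003").setScale(9));   // 3404045.504400300
  }
}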

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/groupby12.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby12.q.out b/ql/src/test/results/clientpositive/groupby12.q.out
index 7317c4e..43fdf6a 100644
--- a/ql/src/test/results/clientpositive/groupby12.q.out
+++ b/ql/src/test/results/clientpositive/groupby12.q.out
@@ -45,7 +45,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1, _col2
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col1) (type: int), UDFToString(_col2) (type: string)
+            expressions: UDFToInteger(_col1) (type: int), CAST( _col2 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/groupby5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby5.q.out b/ql/src/test/results/clientpositive/groupby5.q.out
index b3c2da2..7e9d928 100644
--- a/ql/src/test/results/clientpositive/groupby5.q.out
+++ b/ql/src/test/results/clientpositive/groupby5.q.out
@@ -77,7 +77,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/groupby5_noskew.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby5_noskew.q.out b/ql/src/test/results/clientpositive/groupby5_noskew.q.out
index 5307bc7..8a7b1aa 100644
--- a/ql/src/test/results/clientpositive/groupby5_noskew.q.out
+++ b/ql/src/test/results/clientpositive/groupby5_noskew.q.out
@@ -50,7 +50,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/groupby7_map.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby7_map.q.out b/ql/src/test/results/clientpositive/groupby7_map.q.out
index 5b4b9af..c27f02e 100644
--- a/ql/src/test/results/clientpositive/groupby7_map.q.out
+++ b/ql/src/test/results/clientpositive/groupby7_map.q.out
@@ -81,7 +81,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -176,7 +176,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/groupby7_map_multi_single_reducer.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby7_map_multi_single_reducer.q.out b/ql/src/test/results/clientpositive/groupby7_map_multi_single_reducer.q.out
index 00d26b9..294d2ec 100644
--- a/ql/src/test/results/clientpositive/groupby7_map_multi_single_reducer.q.out
+++ b/ql/src/test/results/clientpositive/groupby7_map_multi_single_reducer.q.out
@@ -61,7 +61,7 @@ STAGE PLANS:
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             Select Operator
-              expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+              expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
               outputColumnNames: _col0, _col1
               Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
@@ -94,7 +94,7 @@ STAGE PLANS:
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             Select Operator
-              expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+              expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
               outputColumnNames: _col0, _col1
               Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
               File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/groupby7_map_skew.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby7_map_skew.q.out b/ql/src/test/results/clientpositive/groupby7_map_skew.q.out
index 089a14e..10a3ae4 100644
--- a/ql/src/test/results/clientpositive/groupby7_map_skew.q.out
+++ b/ql/src/test/results/clientpositive/groupby7_map_skew.q.out
@@ -108,7 +108,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -228,7 +228,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/groupby7_noskew.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby7_noskew.q.out b/ql/src/test/results/clientpositive/groupby7_noskew.q.out
index ec83bb6..aaa09b8 100644
--- a/ql/src/test/results/clientpositive/groupby7_noskew.q.out
+++ b/ql/src/test/results/clientpositive/groupby7_noskew.q.out
@@ -70,7 +70,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -160,7 +160,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/groupby7_noskew_multi_single_reducer.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby7_noskew_multi_single_reducer.q.out b/ql/src/test/results/clientpositive/groupby7_noskew_multi_single_reducer.q.out
index c844e51..abe4ff9 100644
--- a/ql/src/test/results/clientpositive/groupby7_noskew_multi_single_reducer.q.out
+++ b/ql/src/test/results/clientpositive/groupby7_noskew_multi_single_reducer.q.out
@@ -101,7 +101,7 @@ STAGE PLANS:
             Number of rows: 10
             Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
             Select Operator
-              expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+              expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
               outputColumnNames: _col0, _col1
               Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
@@ -192,7 +192,7 @@ STAGE PLANS:
             Number of rows: 10
             Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
             Select Operator
-              expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+              expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
               outputColumnNames: _col0, _col1
               Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
               File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/groupby8.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby8.q.out b/ql/src/test/results/clientpositive/groupby8.q.out
index 2d8ea6f..ceb8a5b 100644
--- a/ql/src/test/results/clientpositive/groupby8.q.out
+++ b/ql/src/test/results/clientpositive/groupby8.q.out
@@ -98,7 +98,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -235,7 +235,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -1049,7 +1049,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -1186,7 +1186,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/groupby8_map.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby8_map.q.out b/ql/src/test/results/clientpositive/groupby8_map.q.out
index cf3ade6..529970c 100644
--- a/ql/src/test/results/clientpositive/groupby8_map.q.out
+++ b/ql/src/test/results/clientpositive/groupby8_map.q.out
@@ -60,7 +60,7 @@ STAGE PLANS:
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             Select Operator
-              expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+              expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
               outputColumnNames: _col0, _col1
               Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
@@ -93,7 +93,7 @@ STAGE PLANS:
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             Select Operator
-              expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+              expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
               outputColumnNames: _col0, _col1
               Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
               File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/groupby8_map_skew.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby8_map_skew.q.out b/ql/src/test/results/clientpositive/groupby8_map_skew.q.out
index 86598aa..f5bcc5a 100644
--- a/ql/src/test/results/clientpositive/groupby8_map_skew.q.out
+++ b/ql/src/test/results/clientpositive/groupby8_map_skew.q.out
@@ -107,7 +107,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -226,7 +226,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/groupby8_noskew.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby8_noskew.q.out b/ql/src/test/results/clientpositive/groupby8_noskew.q.out
index a3d3656..ed6c3a5 100644
--- a/ql/src/test/results/clientpositive/groupby8_noskew.q.out
+++ b/ql/src/test/results/clientpositive/groupby8_noskew.q.out
@@ -60,7 +60,7 @@ STAGE PLANS:
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             Select Operator
-              expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+              expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
               outputColumnNames: _col0, _col1
               Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
@@ -88,7 +88,7 @@ STAGE PLANS:
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             Select Operator
-              expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+              expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
               outputColumnNames: _col0, _col1
               Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
               File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/groupby9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby9.q.out b/ql/src/test/results/clientpositive/groupby9.q.out
index 8d97bdf..98d437f 100644
--- a/ql/src/test/results/clientpositive/groupby9.q.out
+++ b/ql/src/test/results/clientpositive/groupby9.q.out
@@ -80,7 +80,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -174,7 +174,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1, _col2
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), UDFToString(_col2) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), CAST( _col2 AS STRING) (type: string)
             outputColumnNames: _col0, _col1, _col2
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -953,7 +953,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -1047,7 +1047,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1, _col2
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col1) (type: int), _col0 (type: string), UDFToString(_col2) (type: string)
+            expressions: UDFToInteger(_col1) (type: int), _col0 (type: string), CAST( _col2 AS STRING) (type: string)
             outputColumnNames: _col0, _col1, _col2
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -1826,7 +1826,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -1920,7 +1920,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1, _col2
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), UDFToString(_col2) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), CAST( _col2 AS STRING) (type: string)
             outputColumnNames: _col0, _col1, _col2
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -2701,7 +2701,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -2796,7 +2796,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1, _col2
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), UDFToString(_col2) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), CAST( _col2 AS STRING) (type: string)
             outputColumnNames: _col0, _col1, _col2
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -3575,7 +3575,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -3669,7 +3669,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1, _col2
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col1) (type: int), _col0 (type: string), UDFToString(_col2) (type: string)
+            expressions: UDFToInteger(_col1) (type: int), _col0 (type: string), CAST( _col2 AS STRING) (type: string)
             outputColumnNames: _col0, _col1, _col2
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out b/ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out
index 5f08df6..caa031b 100644
--- a/ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out
+++ b/ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out
@@ -84,7 +84,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1, _col2
           Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: _col0 (type: string), UDFToString(_col2) (type: string)
+            expressions: _col0 (type: string), CAST( _col2 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/groupby_position.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_position.q.out b/ql/src/test/results/clientpositive/groupby_position.q.out
index 5678bd5..86a2dcb 100644
--- a/ql/src/test/results/clientpositive/groupby_position.q.out
+++ b/ql/src/test/results/clientpositive/groupby_position.q.out
@@ -78,7 +78,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -172,7 +172,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1, _col2
           Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), UDFToString(_col2) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), CAST( _col2 AS STRING) (type: string)
             outputColumnNames: _col0, _col1, _col2
             Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -357,7 +357,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col0) (type: int), UDFToString(_col1) (type: string)
+            expressions: UDFToInteger(_col0) (type: int), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -451,7 +451,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1, _col2
           Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(_col1) (type: int), _col0 (type: string), UDFToString(_col2) (type: string)
+            expressions: UDFToInteger(_col1) (type: int), _col0 (type: string), CAST( _col2 AS STRING) (type: string)
             outputColumnNames: _col0, _col1, _col2
             Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out b/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
index aaf89ae..033d00b 100644
--- a/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
+++ b/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
@@ -3733,7 +3733,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToString(_col0) (type: string), _col1 (type: bigint)
+            expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: bigint)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out b/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out
index e8a1143..1eddcf5 100644
--- a/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out
+++ b/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out
@@ -3149,7 +3149,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToInteger(UDFToString(_col0)) (type: int), UDFToInteger(_col1) (type: int)
+            expressions: UDFToInteger(CAST( _col0 AS STRING)) (type: int), UDFToInteger(_col1) (type: int)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -4083,7 +4083,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToString(_col0) (type: string), _col1 (type: bigint)
+            expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: bigint)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out b/ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out
index fd9e553..aceb845 100644
--- a/ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out
+++ b/ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out
@@ -454,7 +454,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: _col0 (type: string), UDFToString(_col1) (type: string), if(((UDFToDouble(_col0) % 100.0D) = 0.0D), '11', '12') (type: string)
+            expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string), if(((UDFToDouble(_col0) % 100.0D) = 0.0D), '11', '12') (type: string)
             outputColumnNames: _col0, _col1, _col2
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/infer_bucket_sort_grouping_operators.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/infer_bucket_sort_grouping_operators.q.out b/ql/src/test/results/clientpositive/infer_bucket_sort_grouping_operators.q.out
index 50cc4dc..cfd190f 100644
--- a/ql/src/test/results/clientpositive/infer_bucket_sort_grouping_operators.q.out
+++ b/ql/src/test/results/clientpositive/infer_bucket_sort_grouping_operators.q.out
@@ -59,7 +59,7 @@ STAGE PLANS:
           Statistics: Num rows: 750 Data size: 7968 Basic stats: COMPLETE Column stats: NONE
           pruneGroupingSetId: true
           Select Operator
-            expressions: _col0 (type: string), _col1 (type: string), UDFToString(_col3) (type: string)
+            expressions: _col0 (type: string), _col1 (type: string), CAST( _col3 AS STRING) (type: string)
             outputColumnNames: _col0, _col1, _col2
             Statistics: Num rows: 750 Data size: 7968 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -1540,7 +1540,7 @@ STAGE PLANS:
           Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
           pruneGroupingSetId: true
           Select Operator
-            expressions: _col0 (type: string), _col1 (type: string), UDFToString(_col3) (type: string)
+            expressions: _col0 (type: string), _col1 (type: string), CAST( _col3 AS STRING) (type: string)
             outputColumnNames: _col0, _col1, _col2
             Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -1767,7 +1767,7 @@ STAGE PLANS:
           Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
           pruneGroupingSetId: true
           Select Operator
-            expressions: _col0 (type: string), _col1 (type: string), UDFToString(_col3) (type: string)
+            expressions: _col0 (type: string), _col1 (type: string), CAST( _col3 AS STRING) (type: string)
             outputColumnNames: _col0, _col1, _col2
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
             File Output Operator


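The .q.out changes above are plan-text only: the same string conversion is still inserted by the optimizer, but EXPLAIN now prints it as CAST( ... AS STRING) instead of UDFToString(...). A minimal HiveQL sketch (illustrative only, not one of the committed qfiles; the table name is hypothetical) of a query whose plan contains such an expression:

    create table src_int (key int, cnt bigint);
    -- the string conversion below is expected to show up in EXPLAIN as
    -- "CAST( key AS STRING)" rather than "UDFToString(key)"
    explain select cast(key as string), count(*) from src_int group by key;
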
[10/11] hive git commit: HIVE-19532 : fix tests for master-txnstats branch - fix one more out (Sergey Shelukhin)

Posted by se...@apache.org.
HIVE-19532 : fix tests for master-txnstats branch - fix one more out (Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7bd688b2
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7bd688b2
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7bd688b2

Branch: refs/heads/master-txnstats
Commit: 7bd688b27a6988c9a242015eeb4450784c164049
Parents: 9f9ae73
Author: sergey <se...@apache.org>
Authored: Tue Jul 24 12:39:25 2018 -0700
Committer: sergey <se...@apache.org>
Committed: Tue Jul 24 12:39:25 2018 -0700

----------------------------------------------------------------------
 .../test/queries/clientpositive/stats_part2.q   | 12 +++------
 .../results/clientpositive/stats_part2.q.out    | 28 ++++++++++----------
 2 files changed, 17 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/7bd688b2/ql/src/test/queries/clientpositive/stats_part2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/stats_part2.q b/ql/src/test/queries/clientpositive/stats_part2.q
index 24be218..068e928 100644
--- a/ql/src/test/queries/clientpositive/stats_part2.q
+++ b/ql/src/test/queries/clientpositive/stats_part2.q
@@ -15,6 +15,8 @@ set hive.support.concurrency=true;
 set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
 set hive.query.results.cache.enabled=false;
 
+set metastore.aggregate.stats.cache.enabled=false;
+
 -- create source.
 drop table if exists mysource;
 create table mysource (p int, key int, value string);
@@ -22,22 +24,14 @@ insert into mysource values (100,20,'value20'), (101,40,'string40'), (102,50,'st
 insert into mysource values (100,21,'value21'), (101,41,'value41'), (102,51,'value51');
 
 -- test partitioned table
-drop table if exists stats_partitioned;
+drop table if exists stats_part;
 
---create table stats_part(key int,value string) partitioned by (p int) stored as orc;
 create table stats_part(key int,value string) partitioned by (p int) stored as orc tblproperties ("transactional"="true");
---create table stats_part(key int,value string) partitioned by (p int) stored as orc tblproperties ("transactional"="true", "transactional_properties"="insert_only");
 
---explain select count(*) from stats_part;
---select count(*) from stats_part;
---explain select count(*) from stats_part where p = 100;
---select count(*) from stats_part where p = 100;
 explain select count(*) from stats_part where p > 100;
 explain select max(key) from stats_part where p > 100;
---select count(*) from stats_part where p > 100;
 desc formatted stats_part;
 
---explain insert into table stats_part partition(p=100) select distinct key, value from mysource where p == 100;
 insert into table stats_part partition(p=100) select distinct key, value from mysource where p == 100;
 insert into table stats_part partition(p=101) select distinct key, value from mysource where p == 101;
 insert into table stats_part partition(p=102) select distinct key, value from mysource where p == 102;

http://git-wip-us.apache.org/repos/asf/hive/blob/7bd688b2/ql/src/test/results/clientpositive/stats_part2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/stats_part2.q.out b/ql/src/test/results/clientpositive/stats_part2.q.out
index 9c22ce7..dfdf7b3 100644
--- a/ql/src/test/results/clientpositive/stats_part2.q.out
+++ b/ql/src/test/results/clientpositive/stats_part2.q.out
@@ -32,9 +32,9 @@ POSTHOOK: Output: default@mysource
 POSTHOOK: Lineage: mysource.key SCRIPT []
 POSTHOOK: Lineage: mysource.p SCRIPT []
 POSTHOOK: Lineage: mysource.value SCRIPT []
-PREHOOK: query: drop table if exists stats_partitioned
+PREHOOK: query: drop table if exists stats_part
 PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table if exists stats_partitioned
+POSTHOOK: query: drop table if exists stats_part
 POSTHOOK: type: DROPTABLE
 PREHOOK: query: create table stats_part(key int,value string) partitioned by (p int) stored as orc tblproperties ("transactional"="true")
 PREHOOK: type: CREATETABLE
@@ -594,19 +594,19 @@ STAGE PLANS:
           TableScan
             alias: stats_part
             filterExpr: (p > 100) (type: boolean)
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: PARTIAL
+            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
             Select Operator
               expressions: key (type: int)
               outputColumnNames: key
-              Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: PARTIAL
+              Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
               Group By Operator
                 aggregations: max(key)
                 mode: hash
                 outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: PARTIAL
+                Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
                 Reduce Output Operator
                   sort order: 
-                  Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: PARTIAL
+                  Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: int)
       Execution mode: vectorized
       Reduce Operator Tree:
@@ -614,10 +614,10 @@ STAGE PLANS:
           aggregations: max(VALUE._col0)
           mode: mergepartial
           outputColumnNames: _col0
-          Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: PARTIAL
+          Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: PARTIAL
+            Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1185,19 +1185,19 @@ STAGE PLANS:
           TableScan
             alias: stats_part
             filterExpr: (p > 100) (type: boolean)
-            Statistics: Num rows: 5 Data size: 20 Basic stats: COMPLETE Column stats: PARTIAL
+            Statistics: Num rows: 5 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE
             Select Operator
               expressions: key (type: int)
               outputColumnNames: key
-              Statistics: Num rows: 5 Data size: 20 Basic stats: COMPLETE Column stats: PARTIAL
+              Statistics: Num rows: 5 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE
               Group By Operator
                 aggregations: max(key)
                 mode: hash
                 outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: PARTIAL
+                Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
                 Reduce Output Operator
                   sort order: 
-                  Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: PARTIAL
+                  Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: int)
       Execution mode: vectorized
       Reduce Operator Tree:
@@ -1205,10 +1205,10 @@ STAGE PLANS:
           aggregations: max(VALUE._col0)
           mode: mergepartial
           outputColumnNames: _col0
-          Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: PARTIAL
+          Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: PARTIAL
+            Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

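Condensed, the test change above disables the metastore aggregate stats cache so that the partitioned, transactional table reports "Column stats: COMPLETE" instead of PARTIAL. A sketch of the relevant qfile fragment after this fix (abridged; the full file sets additional options and also loads data from mysource):

    set hive.support.concurrency=true;
    set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
    set hive.query.results.cache.enabled=false;
    set metastore.aggregate.stats.cache.enabled=false;

    drop table if exists stats_part;
    create table stats_part(key int, value string) partitioned by (p int)
      stored as orc tblproperties ("transactional"="true");

    -- with the cache disabled, the plans for these queries are expected to
    -- report "Column stats: COMPLETE"
    explain select count(*) from stats_part where p > 100;
    explain select max(key) from stats_part where p > 100;
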

[04/11] hive git commit: HIVE-20082: HiveDecimal to string conversion doesn't format the decimal correctly (Jason Dere, reviewed by Ashutosh Chauhan)

Posted by se...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/vector_case_when_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_case_when_1.q.out b/ql/src/test/results/clientpositive/vector_case_when_1.q.out
index 59d8133..4151082 100644
--- a/ql/src/test/results/clientpositive/vector_case_when_1.q.out
+++ b/ql/src/test/results/clientpositive/vector_case_when_1.q.out
@@ -199,7 +199,7 @@ STAGE PLANS:
             alias: lineitem_test
             Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE
             Select Operator
-              expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP      '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0.0D) END (type: double), if((UDFToString(l_shipinstruct) = 'DELIVER
  IN PERSON'), null, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 10000), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 10000), null, datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date)
+              expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP      '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0.0D) END (type: double), if((CAST( l_shipinstruct AS STRING) = 'DEL
 IVER IN PERSON'), null, l_tax) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 10000), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 10000), null, datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
               Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
@@ -510,13 +510,13 @@ STAGE PLANS:
                 native: true
                 vectorizationSchemaColumns: [0:l_orderkey:int, 1:l_partkey:int, 2:l_suppkey:int, 3:l_linenumber:int, 4:l_quantity:int, 5:l_extendedprice:double, 6:l_discount:double, 7:l_tax:decimal(10,2)/DECIMAL_64, 8:l_returnflag:char(1), 9:l_linestatus:char(1), 10:l_shipdate:date, 11:l_commitdate:date, 12:l_receiptdate:date, 13:l_shipinstruct:varchar(20), 14:l_shipmode:char(10), 15:l_comment:string, 16:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
             Select Operator
-              expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP      '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0.0D) END (type: double), if((UDFToString(l_shipinstruct) = 'DELIVER
  IN PERSON'), null, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 10000), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 10000), null, datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date)
+              expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP      '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0.0D) END (type: double), if((CAST( l_shipinstruct AS STRING) = 'DEL
 IVER IN PERSON'), null, l_tax) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 10000), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 10000), null, datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
               Select Vectorization:
                   className: VectorSelectOperator
                   native: true
                   projectedOutputColumnNums: [4, 22, 24, 25, 26, 27, 28, 30, 31, 32, 33, 34, 35, 38, 40, 43, 44]
-                  selectExpressions: IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 21:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 22:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 21:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprStringScalarStringScalar(col 20:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean) -> 21:string) -> 22:string) -> 21:string) -> 22:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 24:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val 
 Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprColumnNull(col 20:boolean, col 21:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean, ConstantVectorExpression(val Many) -> 21:string) -> 23:string) -> 24:string) -> 23:string) -> 24:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprNullNull(null, null) -> 23:string) -> 25:string) -> 23:string) -> 25:string, IfExprLongColumnLongColumn(col 17:boolean, col 18:date, col 19:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 17:boolean, VectorUDFDateAddColScalar(co
 l 10:date, val 10) -> 18:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 19:date) -> 26:date, IfExprDoubleColumnLongScalar(col 17:boolean, col 28:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 27:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 27:double) -> 28:double) -> 27:double, IfExprDoubleColumnDoubleScalar(col 17:boolean, col 29:double, val 0.0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 28:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 28:double) -> 29:double) -> 28:double, IfExprNullColumn(col 17:boolean, null, col 46)(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 17:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) ->
  46:decimal(10,2)) -> 30:decimal(10,2), IfExprColumnNull(col 18:boolean, col 47:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 18:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 47:decimal(10,2)) -> 31:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 32:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 33:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: String
 GroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 34:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 35:decimal(10,2), IfExprTimestampColumnColumn(col 19:boolean, col 36:timestampcol 37:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 19:boolean, CastDateToTimestamp(col 12:date) -> 36:timestamp, CastDateToTimestamp(col 11:date) -> 37:timestamp) -> 38:timestamp, IfExprColumnNull(col 19:boolean, col 39:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 19:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 39:int) -> 40:int, IfExprNullColumn(col 41:boolean, null, col 42)(children: LongColGreaterLongScalar(col 2:int, val 10000) 
 -> 41:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 42:int) -> 43:int, IfExprLongScalarLongScalar(col 45:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 44:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 44:int) -> 45:boolean) -> 44:date
+                  selectExpressions: IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 21:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 22:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 21:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprStringScalarStringScalar(col 20:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean) -> 21:string) -> 22:string) -> 21:string) -> 22:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 24:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val 
 Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprColumnNull(col 20:boolean, col 21:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean, ConstantVectorExpression(val Many) -> 21:string) -> 23:string) -> 24:string) -> 23:string) -> 24:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprNullNull(null, null) -> 23:string) -> 25:string) -> 23:string) -> 25:string, IfExprLongColumnLongColumn(col 17:boolean, col 18:date, col 19:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 17:boolean, VectorUDFDateAddColScalar(co
 l 10:date, val 10) -> 18:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 19:date) -> 26:date, IfExprDoubleColumnLongScalar(col 17:boolean, col 28:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 27:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 27:double) -> 28:double) -> 27:double, IfExprDoubleColumnDoubleScalar(col 17:boolean, col 29:double, val 0.0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 28:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 28:double) -> 29:double) -> 28:double, IfExprNullColumn(col 17:boolean, null, col 46)(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 17:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) ->
  46:decimal(10,2)) -> 30:decimal(10,2), IfExprColumnNull(col 18:boolean, col 47:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 18:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 47:decimal(10,2)) -> 31:decimal(10,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 32:decimal(12,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 33:decimal(12,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax))(chil
 dren: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 34:decimal(10,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 35:decimal(10,2), IfExprTimestampColumnColumn(col 19:boolean, col 36:timestampcol 37:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 19:boolean, CastDateToTimestamp(col 12:date) -> 36:timestamp, CastDateToTimestamp(col 11:date) -> 37:timestamp) -> 38:timestamp, IfExprColumnNull(col 19:boolean, col 39:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 19:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 39:int) -> 40:int, IfExprNullColumn(col 41:boolean, null, col 42)(children: LongColGreaterLongScalar(col 2:
 int, val 10000) -> 41:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 42:int) -> 43:int, IfExprLongScalarLongScalar(col 45:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 44:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 44:int) -> 45:boolean) -> 44:date
               Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
                 compressed: false
@@ -839,13 +839,13 @@ STAGE PLANS:
                 native: true
                 vectorizationSchemaColumns: [0:l_orderkey:int, 1:l_partkey:int, 2:l_suppkey:int, 3:l_linenumber:int, 4:l_quantity:int, 5:l_extendedprice:double, 6:l_discount:double, 7:l_tax:decimal(10,2)/DECIMAL_64, 8:l_returnflag:char(1), 9:l_linestatus:char(1), 10:l_shipdate:date, 11:l_commitdate:date, 12:l_receiptdate:date, 13:l_shipinstruct:varchar(20), 14:l_shipmode:char(10), 15:l_comment:string, 16:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
             Select Operator
-              expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP      '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0.0D) END (type: double), if((UDFToString(l_shipinstruct) = 'DELIVER
  IN PERSON'), null, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 10000), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 10000), null, datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date)
+              expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP      '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0.0D) END (type: double), if((CAST( l_shipinstruct AS STRING) = 'DEL
 IVER IN PERSON'), null, l_tax) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 10000), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 10000), null, datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
               Select Vectorization:
                   className: VectorSelectOperator
                   native: true
                   projectedOutputColumnNums: [4, 27, 38, 48, 52, 54, 60, 63, 65, 67, 68, 69, 70, 73, 76, 79, 80]
-                  selectExpressions: IfExprColumnCondExpr(col 17:boolean, col 18:stringcol 26:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, ConstantVectorExpression(val Single) -> 18:string, IfExprColumnCondExpr(col 19:boolean, col 20:stringcol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 19:boolean, ConstantVectorExpression(val Two) -> 20:string, IfExprColumnCondExpr(col 21:boolean, col 22:stringcol 24:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 21:boolean, ConstantVectorExpression(val Some) -> 22:string, IfExprStringScalarStringScalar(col 23:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 23:boolean) -> 24:string) -> 25:string) -> 26:string) -> 27:string, IfExprColumnCondExpr(col 23:boolean, col 28:stringcol 37:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 23:boolean, ConstantVectorExpression(val Single) -> 28:string, IfExprColumnCondExpr(col 29:boolean, col
  30:stringcol 36:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 29:boolean, ConstantVectorExpression(val Two) -> 30:string, IfExprColumnCondExpr(col 31:boolean, col 32:stringcol 35:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 31:boolean, ConstantVectorExpression(val Some) -> 32:string, IfExprColumnNull(col 33:boolean, col 34:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 33:boolean, ConstantVectorExpression(val Many) -> 34:string) -> 35:string) -> 36:string) -> 37:string) -> 38:string, IfExprColumnCondExpr(col 39:boolean, col 40:stringcol 47:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 39:boolean, ConstantVectorExpression(val Single) -> 40:string, IfExprColumnCondExpr(col 41:boolean, col 42:stringcol 46:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 41:boolean, ConstantVectorExpression(val Two) -> 42:string, IfExprColumnCondExpr(col 43:boolean, col 44:stringcol 45:string)(children: LongColLessLongS
 calar(col 4:int, val 10) -> 43:boolean, ConstantVectorExpression(val Some) -> 44:string, IfExprNullNull(null, null) -> 45:string) -> 46:string) -> 47:string) -> 48:string, IfExprCondExprCondExpr(col 49:boolean, col 50:datecol 51:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 49:boolean, VectorUDFDateAddColScalar(col 10:date, val 10) -> 50:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 51:date) -> 52:date, IfExprDoubleColumnLongScalar(col 57:boolean, col 58:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 54:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 54:double) -> 58:double) -> 54:double, IfExprCondExprColumn(col 57:boolean, col 59:double, col 58:double)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 58:double)(children: DoubleScalarSubtractDoubleC
 olumn(val 1.0, col 6:double) -> 58:double) -> 59:double, ConstantVectorExpression(val 0.0) -> 58:double) -> 60:double, IfExprNullColumn(col 62:boolean, null, col 82)(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 62:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 82:decimal(10,2)) -> 63:decimal(10,2), IfExprColumnNull(col 64:boolean, col 83:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 64:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 83:decimal(10,2)) -> 65:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean
 ) -> 67:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 68:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 69:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 70:decimal(10,2), IfExprCondExprCondExpr(col 66:boolean, col 71:timestampcol 72:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 66:boolean, CastDateToTimestamp(col 12:date)
  -> 71:timestamp, CastDateToTimestamp(col 11:date) -> 72:timestamp) -> 73:timestamp, IfExprCondExprNull(col 74:boolean, col 75:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 74:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 75:int) -> 76:int, IfExprNullCondExpr(col 77:boolean, null, col 78:int)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 77:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 78:int) -> 79:int, IfExprLongScalarLongScalar(col 81:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 80:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 80:int) -> 81:boolean) -> 80:date
+                  selectExpressions: IfExprColumnCondExpr(col 17:boolean, col 18:stringcol 26:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, ConstantVectorExpression(val Single) -> 18:string, IfExprColumnCondExpr(col 19:boolean, col 20:stringcol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 19:boolean, ConstantVectorExpression(val Two) -> 20:string, IfExprColumnCondExpr(col 21:boolean, col 22:stringcol 24:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 21:boolean, ConstantVectorExpression(val Some) -> 22:string, IfExprStringScalarStringScalar(col 23:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 23:boolean) -> 24:string) -> 25:string) -> 26:string) -> 27:string, IfExprColumnCondExpr(col 23:boolean, col 28:stringcol 37:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 23:boolean, ConstantVectorExpression(val Single) -> 28:string, IfExprColumnCondExpr(col 29:boolean, col
  30:stringcol 36:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 29:boolean, ConstantVectorExpression(val Two) -> 30:string, IfExprColumnCondExpr(col 31:boolean, col 32:stringcol 35:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 31:boolean, ConstantVectorExpression(val Some) -> 32:string, IfExprColumnNull(col 33:boolean, col 34:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 33:boolean, ConstantVectorExpression(val Many) -> 34:string) -> 35:string) -> 36:string) -> 37:string) -> 38:string, IfExprColumnCondExpr(col 39:boolean, col 40:stringcol 47:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 39:boolean, ConstantVectorExpression(val Single) -> 40:string, IfExprColumnCondExpr(col 41:boolean, col 42:stringcol 46:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 41:boolean, ConstantVectorExpression(val Two) -> 42:string, IfExprColumnCondExpr(col 43:boolean, col 44:stringcol 45:string)(children: LongColLessLongS
 calar(col 4:int, val 10) -> 43:boolean, ConstantVectorExpression(val Some) -> 44:string, IfExprNullNull(null, null) -> 45:string) -> 46:string) -> 47:string) -> 48:string, IfExprCondExprCondExpr(col 49:boolean, col 50:datecol 51:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 49:boolean, VectorUDFDateAddColScalar(col 10:date, val 10) -> 50:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 51:date) -> 52:date, IfExprDoubleColumnLongScalar(col 57:boolean, col 58:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 54:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 54:double) -> 58:double) -> 54:double, IfExprCondExprColumn(col 57:boolean, col 59:double, col 58:double)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 58:double)(children: DoubleScalarSubtractDoubleC
 olumn(val 1.0, col 6:double) -> 58:double) -> 59:double, ConstantVectorExpression(val 0.0) -> 58:double) -> 60:double, IfExprNullColumn(col 62:boolean, null, col 82)(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 62:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 82:decimal(10,2)) -> 63:decimal(10,2), IfExprColumnNull(col 64:boolean, col 83:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 64:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 83:decimal(10,2)) -> 65:decimal(10,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boo
 lean) -> 67:decimal(12,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 68:decimal(12,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 69:decimal(10,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 70:decimal(10,2), IfExprCondExprCondExpr(col 66:boolean, col 71:timestampcol 72:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 66:boolean, CastDateToTimest
 amp(col 12:date) -> 71:timestamp, CastDateToTimestamp(col 11:date) -> 72:timestamp) -> 73:timestamp, IfExprCondExprNull(col 74:boolean, col 75:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 74:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 75:int) -> 76:int, IfExprNullCondExpr(col 77:boolean, null, col 78:int)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 77:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 78:int) -> 79:int, IfExprLongScalarLongScalar(col 81:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 80:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 80:int) -> 81:boolean) -> 80:date
               Statistics: Num rows: 101 Data size: 78500 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
                 compressed: false

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/vector_char_mapjoin1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_char_mapjoin1.q.out b/ql/src/test/results/clientpositive/vector_char_mapjoin1.q.out
index 7301257..260c159 100644
--- a/ql/src/test/results/clientpositive/vector_char_mapjoin1.q.out
+++ b/ql/src/test/results/clientpositive/vector_char_mapjoin1.q.out
@@ -431,7 +431,7 @@ STAGE PLANS:
                 Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
                 HashTable Sink Operator
                   keys:
-                    0 UDFToString(_col1) (type: string)
+                    0 CAST( _col1 AS STRING) (type: string)
                     1 _col1 (type: string)
 
   Stage: Stage-2
@@ -462,7 +462,7 @@ STAGE PLANS:
                   condition map:
                        Inner Join 0 to 1
                   keys:
-                    0 UDFToString(_col1) (type: string)
+                    0 CAST( _col1 AS STRING) (type: string)
                     1 _col1 (type: string)
                   Map Join Vectorization:
                       bigTableKeyExpressions: CastStringGroupToString(col 1:char(10)) -> 3:string

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/vector_decimal_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_decimal_1.q.out b/ql/src/test/results/clientpositive/vector_decimal_1.q.out
index e616912..253ccdd 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_1.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_1.q.out
@@ -705,7 +705,7 @@ STAGE PLANS:
                 native: true
                 vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
             Select Operator
-              expressions: UDFToString(t) (type: string)
+              expressions: CAST( t AS STRING) (type: string)
               outputColumnNames: _col0
               Select Vectorization:
                   className: VectorSelectOperator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out b/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
index 0193f3b..c131dc0 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
@@ -65,7 +65,7 @@ STAGE PLANS:
               predicate: ((cdecimal1 < 12345.5678) and (cdecimal1 > 0) and (cdecimal2 <> 0) and (cdecimal2 > 1000) and cdouble is not null) (type: boolean)
               Statistics: Num rows: 455 Data size: 78802 Basic stats: COMPLETE Column stats: NONE
               Select Operator
-                expressions: (cdecimal1 + cdecimal2) (type: decimal(25,14)), (cdecimal1 - (2 * cdecimal2)) (type: decimal(26,14)), ((cdecimal1 + 2.34) / cdecimal2) (type: decimal(38,13)), (cdecimal1 * (cdecimal2 / 3.4)) (type: decimal(38,17)), (cdecimal1 % 10) (type: decimal(12,10)), UDFToInteger(cdecimal1) (type: int), UDFToShort(cdecimal2) (type: smallint), UDFToByte(cdecimal2) (type: tinyint), UDFToLong(cdecimal1) (type: bigint), UDFToBoolean(cdecimal1) (type: boolean), UDFToDouble(cdecimal2) (type: double), UDFToFloat(cdecimal1) (type: float), UDFToString(cdecimal2) (type: string), CAST( cdecimal1 AS TIMESTAMP) (type: timestamp)
+                expressions: (cdecimal1 + cdecimal2) (type: decimal(25,14)), (cdecimal1 - (2 * cdecimal2)) (type: decimal(26,14)), ((cdecimal1 + 2.34) / cdecimal2) (type: decimal(38,13)), (cdecimal1 * (cdecimal2 / 3.4)) (type: decimal(38,17)), (cdecimal1 % 10) (type: decimal(12,10)), UDFToInteger(cdecimal1) (type: int), UDFToShort(cdecimal2) (type: smallint), UDFToByte(cdecimal2) (type: tinyint), UDFToLong(cdecimal1) (type: bigint), UDFToBoolean(cdecimal1) (type: boolean), UDFToDouble(cdecimal2) (type: double), UDFToFloat(cdecimal1) (type: float), CAST( cdecimal2 AS STRING) (type: string), CAST( cdecimal1 AS TIMESTAMP) (type: timestamp)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
                 Select Vectorization:
                     className: VectorSelectOperator
@@ -137,16 +137,16 @@ LIMIT 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_test_n1
 #### A masked pattern was here ####
-1836.44199584197700	-1166.02723492725400	0.8372697814834	245972.55810810255804469	5.6189189189	835	1000	NULL	835	true	1000.823076923077	835.6189	1000.823076923077	1970-01-01 00:13:55.618918918
-1856.13222453224620	-1178.52931392929240	0.8372449787014	251275.44324324968747899	4.5783783784	844	1011	NULL	844	true	1011.5538461538462	844.57837	1011.5538461538462	1970-01-01 00:14:04.578378378
-1858.75758835761550	-1180.19625779623100	0.8372417113669	251986.76756757564861519	5.7729729730	845	1012	NULL	845	true	1012.9846153846155	845.77295	1012.9846153846155	1970-01-01 00:14:05.772972973
-1862.69563409566930	-1182.69667359663860	0.8372368276345	253055.63918919969667286	7.5648648649	847	1015	NULL	847	true	1015.1307692307693	847.5649	1015.1307692307693	1970-01-01 00:14:07.564864864
-1883.69854469852330	-1196.03222453224660	0.8372111259286	258794.49324323677116559	7.1216216216	857	1026	NULL	857	true	1026.5769230769233	857.12164	1026.5769230769233	1970-01-01 00:14:17.121621621
-1886.32390852389240	-1197.69916839918480	0.8372079534582	259516.37432431944456816	8.3162162162	858	1028	NULL	858	true	1028.0076923076924	858.3162	1028.0076923076924	1970-01-01 00:14:18.316216216
-1887.63659043657700	-1198.53264033265400	0.8372063705322	259877.69189188782259834	8.9135135135	858	1028	NULL	858	true	1028.723076923077	858.9135	1028.723076923077	1970-01-01 00:14:18.913513513
-1895.51268191268460	-1203.53347193346920	0.8371969190171	262050.87567567649292835	2.4972972973	862	1033	NULL	862	true	1033.0153846153846	862.4973	1033.0153846153846	1970-01-01 00:14:22.497297297
-1909.95218295221550	-1212.70166320163100	0.8371797936946	266058.54729730725574014	9.0675675676	869	1040	NULL	869	true	1040.8846153846155	869.06757	1040.8846153846155	1970-01-01 00:14:29.067567567
-1913.89022869026920	-1215.20207900203840	0.8371751679996	267156.82702703945592392	0.8594594595	870	1043	NULL	870	true	1043.0307692307692	870.85944	1043.0307692307692	1970-01-01 00:14:30.859459459
+1836.44199584197700	-1166.02723492725400	0.8372697814834	245972.55810810255804469	5.6189189189	835	1000	NULL	835	true	1000.823076923077	835.6189	1000.82307692307700	1970-01-01 00:13:55.618918918
+1856.13222453224620	-1178.52931392929240	0.8372449787014	251275.44324324968747899	4.5783783784	844	1011	NULL	844	true	1011.5538461538462	844.57837	1011.55384615384620	1970-01-01 00:14:04.578378378
+1858.75758835761550	-1180.19625779623100	0.8372417113669	251986.76756757564861519	5.7729729730	845	1012	NULL	845	true	1012.9846153846155	845.77295	1012.98461538461550	1970-01-01 00:14:05.772972973
+1862.69563409566930	-1182.69667359663860	0.8372368276345	253055.63918919969667286	7.5648648649	847	1015	NULL	847	true	1015.1307692307693	847.5649	1015.13076923076930	1970-01-01 00:14:07.564864864
+1883.69854469852330	-1196.03222453224660	0.8372111259286	258794.49324323677116559	7.1216216216	857	1026	NULL	857	true	1026.5769230769233	857.12164	1026.57692307692330	1970-01-01 00:14:17.121621621
+1886.32390852389240	-1197.69916839918480	0.8372079534582	259516.37432431944456816	8.3162162162	858	1028	NULL	858	true	1028.0076923076924	858.3162	1028.00769230769240	1970-01-01 00:14:18.316216216
+1887.63659043657700	-1198.53264033265400	0.8372063705322	259877.69189188782259834	8.9135135135	858	1028	NULL	858	true	1028.723076923077	858.9135	1028.72307692307700	1970-01-01 00:14:18.913513513
+1895.51268191268460	-1203.53347193346920	0.8371969190171	262050.87567567649292835	2.4972972973	862	1033	NULL	862	true	1033.0153846153846	862.4973	1033.01538461538460	1970-01-01 00:14:22.497297297
+1909.95218295221550	-1212.70166320163100	0.8371797936946	266058.54729730725574014	9.0675675676	869	1040	NULL	869	true	1040.8846153846155	869.06757	1040.88461538461550	1970-01-01 00:14:29.067567567
+1913.89022869026920	-1215.20207900203840	0.8371751679996	267156.82702703945592392	0.8594594595	870	1043	NULL	870	true	1043.0307692307692	870.85944	1043.03076923076920	1970-01-01 00:14:30.859459459
 PREHOOK: query: SELECT SUM(HASH(*))
 FROM (SELECT cdecimal1 + cdecimal2 as c1, cdecimal1 - (2*cdecimal2) as c2, ((cdecimal1+2.34)/cdecimal2) as c3, (cdecimal1 * (cdecimal2/3.4)) as c4, cdecimal1 % 10 as c5, CAST(cdecimal1 AS INT) as c6, CAST(cdecimal2 AS SMALLINT) as c7, CAST(cdecimal2 AS TINYINT) as c8, CAST(cdecimal1 AS BIGINT) as c9, CAST (cdecimal1 AS BOOLEAN) as c10, CAST(cdecimal2 AS DOUBLE) as c11, CAST(cdecimal1 AS FLOAT) as c12, CAST(cdecimal2 AS STRING) as c13, CAST(cdecimal1 AS TIMESTAMP) as c14 FROM decimal_test_n1 WHERE cdecimal1 > 0 AND cdecimal1 < 12345.5678 AND cdecimal2 != 0 AND cdecimal2 > 1000 AND cdouble IS NOT NULL
 ORDER BY c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14) q
@@ -159,7 +159,7 @@ ORDER BY c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14) q
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_test_n1
 #### A masked pattern was here ####
--1300490595129
+1506342314829
 PREHOOK: query: CREATE TABLE decimal_test_small_n0 STORED AS ORC AS SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(10,3)) AS cdecimal1, CAST (((cdouble*9.3)/13) AS DECIMAL(7,2)) AS cdecimal2 FROM alltypesorc
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
@@ -210,7 +210,7 @@ STAGE PLANS:
               predicate: ((cdecimal1 < 12345.5678) and (cdecimal1 > 0) and (cdecimal2 <> 0) and (cdecimal2 > 1000) and cdouble is not null) (type: boolean)
               Statistics: Num rows: 455 Data size: 78788 Basic stats: COMPLETE Column stats: NONE
               Select Operator
-                expressions: (cdecimal1 + cdecimal2) (type: decimal(11,3)), (cdecimal1 - (2 * cdecimal2)) (type: decimal(11,3)), ((cdecimal1 + 2.34) / cdecimal2) (type: decimal(21,11)), (cdecimal1 * (cdecimal2 / 3.4)) (type: decimal(23,9)), (cdecimal1 % 10) (type: decimal(5,3)), UDFToInteger(cdecimal1) (type: int), UDFToShort(cdecimal2) (type: smallint), UDFToByte(cdecimal2) (type: tinyint), UDFToLong(cdecimal1) (type: bigint), UDFToBoolean(cdecimal1) (type: boolean), UDFToDouble(cdecimal2) (type: double), UDFToFloat(cdecimal1) (type: float), UDFToString(cdecimal2) (type: string), CAST( cdecimal1 AS TIMESTAMP) (type: timestamp)
+                expressions: (cdecimal1 + cdecimal2) (type: decimal(11,3)), (cdecimal1 - (2 * cdecimal2)) (type: decimal(11,3)), ((cdecimal1 + 2.34) / cdecimal2) (type: decimal(21,11)), (cdecimal1 * (cdecimal2 / 3.4)) (type: decimal(23,9)), (cdecimal1 % 10) (type: decimal(5,3)), UDFToInteger(cdecimal1) (type: int), UDFToShort(cdecimal2) (type: smallint), UDFToByte(cdecimal2) (type: tinyint), UDFToLong(cdecimal1) (type: bigint), UDFToBoolean(cdecimal1) (type: boolean), UDFToDouble(cdecimal2) (type: double), UDFToFloat(cdecimal1) (type: float), CAST( cdecimal2 AS STRING) (type: string), CAST( cdecimal1 AS TIMESTAMP) (type: timestamp)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
                 Select Vectorization:
                     className: VectorSelectOperator
@@ -304,4 +304,4 @@ ORDER BY c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14) q
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_test_small_n0
 #### A masked pattern was here ####
-1273824888155
+1252336297085

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/vector_string_concat.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_string_concat.q.out b/ql/src/test/results/clientpositive/vector_string_concat.q.out
index 68b011d..ff9fad3 100644
--- a/ql/src/test/results/clientpositive/vector_string_concat.q.out
+++ b/ql/src/test/results/clientpositive/vector_string_concat.q.out
@@ -334,7 +334,7 @@ STAGE PLANS:
             TableScan Vectorization:
                 native: true
             Select Operator
-              expressions: concat(concat(concat('Quarter ', UDFToString(UDFToInteger(((UDFToDouble((month(dt) - 1)) / 3.0D) + 1.0D)))), '-'), UDFToString(year(dt))) (type: string)
+              expressions: concat(concat(concat('Quarter ', CAST( UDFToInteger(((UDFToDouble((month(dt) - 1)) / 3.0D) + 1.0D)) AS STRING)), '-'), CAST( year(dt) AS STRING)) (type: string)
               outputColumnNames: _col0
               Select Vectorization:
                   className: VectorSelectOperator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/vector_varchar_mapjoin1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_varchar_mapjoin1.q.out b/ql/src/test/results/clientpositive/vector_varchar_mapjoin1.q.out
index f956d58..2ea6612 100644
--- a/ql/src/test/results/clientpositive/vector_varchar_mapjoin1.q.out
+++ b/ql/src/test/results/clientpositive/vector_varchar_mapjoin1.q.out
@@ -383,7 +383,7 @@ STAGE PLANS:
                 Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
                 HashTable Sink Operator
                   keys:
-                    0 UDFToString(_col1) (type: string)
+                    0 CAST( _col1 AS STRING) (type: string)
                     1 _col1 (type: string)
 
   Stage: Stage-2
@@ -404,7 +404,7 @@ STAGE PLANS:
                   condition map:
                        Inner Join 0 to 1
                   keys:
-                    0 UDFToString(_col1) (type: string)
+                    0 CAST( _col1 AS STRING) (type: string)
                     1 _col1 (type: string)
                   outputColumnNames: _col0, _col1, _col2, _col3
                   Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/vectorized_casts.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vectorized_casts.q.out b/ql/src/test/results/clientpositive/vectorized_casts.q.out
index a19b5ee..4c82e17 100644
--- a/ql/src/test/results/clientpositive/vectorized_casts.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_casts.q.out
@@ -175,7 +175,7 @@ STAGE PLANS:
               predicate: ((cbigint % 250) = 0) (type: boolean)
               Statistics: Num rows: 6144 Data size: 1453997 Basic stats: COMPLETE Column stats: NONE
               Select Operator
-                expressions: UDFToBoolean(ctinyint) (type: boolean), UDFToBoolean(csmallint) (type: boolean), UDFToBoolean(cint) (type: boolean), UDFToBoolean(cbigint) (type: boolean), UDFToBoolean(cfloat) (type: boolean), UDFToBoolean(cdouble) (type: boolean), cboolean1 (type: boolean), UDFToBoolean((cbigint * 0L)) (type: boolean), UDFToBoolean(ctimestamp1) (type: boolean), UDFToBoolean(cstring1) (type: boolean), UDFToInteger(ctinyint) (type: int), UDFToInteger(csmallint) (type: int), cint (type: int), UDFToInteger(cbigint) (type: int), UDFToInteger(cfloat) (type: int), UDFToInteger(cdouble) (type: int), UDFToInteger(cboolean1) (type: int), UDFToInteger(ctimestamp1) (type: int), UDFToInteger(cstring1) (type: int), UDFToInteger(substr(cstring1, 1, 1)) (type: int), UDFToByte(cfloat) (type: tinyint), UDFToShort(cfloat) (type: smallint), UDFToLong(cfloat) (type: bigint), UDFToDouble(ctinyint) (type: double), UDFToDouble(csmallint) (type: double), UDFToDouble(cint) (type: double), UDFTo
 Double(cbigint) (type: double), UDFToDouble(cfloat) (type: double), cdouble (type: double), UDFToDouble(cboolean1) (type: double), UDFToDouble(ctimestamp1) (type: double), UDFToDouble(cstring1) (type: double), UDFToDouble(substr(cstring1, 1, 1)) (type: double), UDFToFloat(cint) (type: float), UDFToFloat(cdouble) (type: float), CAST( ctinyint AS TIMESTAMP) (type: timestamp), CAST( csmallint AS TIMESTAMP) (type: timestamp), CAST( cint AS TIMESTAMP) (type: timestamp), CAST( cbigint AS TIMESTAMP) (type: timestamp), CAST( cfloat AS TIMESTAMP) (type: timestamp), CAST( cdouble AS TIMESTAMP) (type: timestamp), CAST( cboolean1 AS TIMESTAMP) (type: timestamp), CAST( (cbigint * 0L) AS TIMESTAMP) (type: timestamp), CAST( CAST( ctimestamp1 AS DATE) AS TIMESTAMP) (type: timestamp), ctimestamp1 (type: timestamp), CAST( cstring1 AS TIMESTAMP) (type: timestamp), CAST( substr(cstring1, 1, 1) AS TIMESTAMP) (type: timestamp), UDFToString(ctinyint) (type: string), UDFToString(csmallint) (type: string), 
 UDFToString(cint) (type: string), UDFToString(cbigint) (type: string), UDFToString(cfloat) (type: string), UDFToString(cdouble) (type: string), UDFToString(cboolean1) (type: string), UDFToString((cbigint * 0L)) (type: string), UDFToString(ctimestamp1) (type: string), cstring1 (type: string), UDFToString(CAST( cstring1 AS CHAR(10))) (type: string), UDFToString(CAST( cstring1 AS varchar(10))) (type: string), UDFToFloat(UDFToInteger(cfloat)) (type: float), UDFToDouble((cint * 2)) (type: double), UDFToString(sin(cfloat)) (type: string), (UDFToDouble(UDFToFloat(cint)) + UDFToDouble(cboolean1)) (type: double)
+                expressions: UDFToBoolean(ctinyint) (type: boolean), UDFToBoolean(csmallint) (type: boolean), UDFToBoolean(cint) (type: boolean), UDFToBoolean(cbigint) (type: boolean), UDFToBoolean(cfloat) (type: boolean), UDFToBoolean(cdouble) (type: boolean), cboolean1 (type: boolean), UDFToBoolean((cbigint * 0L)) (type: boolean), UDFToBoolean(ctimestamp1) (type: boolean), UDFToBoolean(cstring1) (type: boolean), UDFToInteger(ctinyint) (type: int), UDFToInteger(csmallint) (type: int), cint (type: int), UDFToInteger(cbigint) (type: int), UDFToInteger(cfloat) (type: int), UDFToInteger(cdouble) (type: int), UDFToInteger(cboolean1) (type: int), UDFToInteger(ctimestamp1) (type: int), UDFToInteger(cstring1) (type: int), UDFToInteger(substr(cstring1, 1, 1)) (type: int), UDFToByte(cfloat) (type: tinyint), UDFToShort(cfloat) (type: smallint), UDFToLong(cfloat) (type: bigint), UDFToDouble(ctinyint) (type: double), UDFToDouble(csmallint) (type: double), UDFToDouble(cint) (type: double), UDFTo
 Double(cbigint) (type: double), UDFToDouble(cfloat) (type: double), cdouble (type: double), UDFToDouble(cboolean1) (type: double), UDFToDouble(ctimestamp1) (type: double), UDFToDouble(cstring1) (type: double), UDFToDouble(substr(cstring1, 1, 1)) (type: double), UDFToFloat(cint) (type: float), UDFToFloat(cdouble) (type: float), CAST( ctinyint AS TIMESTAMP) (type: timestamp), CAST( csmallint AS TIMESTAMP) (type: timestamp), CAST( cint AS TIMESTAMP) (type: timestamp), CAST( cbigint AS TIMESTAMP) (type: timestamp), CAST( cfloat AS TIMESTAMP) (type: timestamp), CAST( cdouble AS TIMESTAMP) (type: timestamp), CAST( cboolean1 AS TIMESTAMP) (type: timestamp), CAST( (cbigint * 0L) AS TIMESTAMP) (type: timestamp), CAST( CAST( ctimestamp1 AS DATE) AS TIMESTAMP) (type: timestamp), ctimestamp1 (type: timestamp), CAST( cstring1 AS TIMESTAMP) (type: timestamp), CAST( substr(cstring1, 1, 1) AS TIMESTAMP) (type: timestamp), CAST( ctinyint AS STRING) (type: string), CAST( csmallint AS STRING) (type: s
 tring), CAST( cint AS STRING) (type: string), CAST( cbigint AS STRING) (type: string), CAST( cfloat AS STRING) (type: string), CAST( cdouble AS STRING) (type: string), CAST( cboolean1 AS STRING) (type: string), CAST( (cbigint * 0L) AS STRING) (type: string), CAST( ctimestamp1 AS STRING) (type: string), cstring1 (type: string), CAST( CAST( cstring1 AS CHAR(10)) AS STRING) (type: string), CAST( CAST( cstring1 AS varchar(10)) AS STRING) (type: string), UDFToFloat(UDFToInteger(cfloat)) (type: float), UDFToDouble((cint * 2)) (type: double), CAST( sin(cfloat) AS STRING) (type: string), (UDFToDouble(UDFToFloat(cint)) + UDFToDouble(cboolean1)) (type: double)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33, _col34, _col35, _col36, _col37, _col38, _col39, _col40, _col41, _col42, _col43, _col44, _col45, _col46, _col47, _col48, _col49, _col50, _col51, _col52, _col53, _col54, _col55, _col56, _col57, _col58, _col59, _col60, _col61, _col62
                 Select Vectorization:
                     className: VectorSelectOperator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
index 1e12cca..84c027d 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
@@ -513,7 +513,8 @@ public class PrimitiveObjectInspectorConverter {
         }
         return t;
       case DECIMAL:
-        t.set(((HiveDecimalObjectInspector) inputOI).getPrimitiveWritableObject(input).toString());
+        HiveDecimal decimalVal = ((HiveDecimalObjectInspector) inputOI).getPrimitiveJavaObject(input);
+        t.set(decimalVal.toFormatString(inputOI.scale()));
         return t;
       default:
         throw new RuntimeException("Hive 2 Internal error: type = " + inputOI.getTypeName());

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
index 6362f2e..8a057d1 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
@@ -963,8 +963,9 @@ public final class PrimitiveObjectInspectorUtils {
       result = ((HiveIntervalDayTimeObjectInspector) oi).getPrimitiveWritableObject(o).toString();
       break;
     case DECIMAL:
+      int scale = ((HiveDecimalObjectInspector) oi).scale();
       result = ((HiveDecimalObjectInspector) oi)
-          .getPrimitiveJavaObject(o).toString();
+          .getPrimitiveJavaObject(o).toFormatString(scale);
       break;
     default:
       throw new RuntimeException("Hive 2 Internal error: unknown type: "

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorConverters.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorConverters.java b/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorConverters.java
index 32fab31..175d453 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorConverters.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorConverters.java
@@ -246,10 +246,54 @@ public class TestObjectInspectorConverters extends TestCase {
       textConverter = ObjectInspectorConverters.getConverter(
           PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector,
           PrimitiveObjectInspectorFactory.writableStringObjectInspector);
-      assertEquals("TextConverter", new Text("100.001"), textConverter
+      assertEquals("TextConverter", new Text("100.001000000000000000"), textConverter
 	  .convert(HiveDecimal.create("100.001")));
       assertEquals("TextConverter", null, textConverter.convert(null));
 
+      // Varchar
+      PrimitiveTypeInfo varchar5TI =
+          (PrimitiveTypeInfo) TypeInfoFactory.getPrimitiveTypeInfo("varchar(5)");
+      PrimitiveTypeInfo varchar30TI =
+          (PrimitiveTypeInfo) TypeInfoFactory.getPrimitiveTypeInfo("varchar(30)");
+      PrimitiveObjectInspector varchar5OI =
+          PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(varchar5TI);
+      PrimitiveObjectInspector varchar30OI =
+          PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(varchar30TI);
+      // Value should be truncated to varchar length 5
+      varcharConverter = ObjectInspectorConverters.getConverter(
+          PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector,
+          varchar5OI);
+      assertEquals("VarcharConverter", "100.0",
+          varcharConverter.convert(HiveDecimal.create("100.001")).toString());
+
+      varcharConverter = ObjectInspectorConverters.getConverter(
+          PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector,
+          varchar30OI);
+      assertEquals("VarcharConverter", "100.001000000000000000",
+          varcharConverter.convert(HiveDecimal.create("100.001")).toString());
+
+      // Char
+      PrimitiveTypeInfo char5TI =
+          (PrimitiveTypeInfo) TypeInfoFactory.getPrimitiveTypeInfo("char(5)");
+      PrimitiveTypeInfo char30TI =
+          (PrimitiveTypeInfo) TypeInfoFactory.getPrimitiveTypeInfo("char(30)");
+      PrimitiveObjectInspector char5OI =
+          PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(char5TI);
+      PrimitiveObjectInspector char30OI =
+          PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(char30TI);
+      // Value should be truncated to char length 5
+      charConverter = ObjectInspectorConverters.getConverter(
+          PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector,
+          char5OI);
+      assertEquals("CharConverter", "100.0",
+          charConverter.convert(HiveDecimal.create("100.001")).toString());
+      // Char value should have space padding to full char length
+      charConverter = ObjectInspectorConverters.getConverter(
+          PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector,
+          char30OI);
+      assertEquals("CharConverter", "100.001000000000000000        ",
+          charConverter.convert(HiveDecimal.create("100.001")).toString());
+
       // Binary
       Converter baConverter = ObjectInspectorConverters.getConverter(
           PrimitiveObjectInspectorFactory.javaStringObjectInspector,

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorUtils.java b/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorUtils.java
index 3c2797e..c731a57 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorUtils.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorUtils.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.junit.Test;
 
 import junit.framework.TestCase;
@@ -243,4 +244,17 @@ public class TestPrimitiveObjectInspectorUtils extends TestCase {
       assertTrue(trueStr, PrimitiveObjectInspectorUtils.parseBoolean(b1, 3, trueStr.length()));
     }
   }
+
+  @Test public void testDecimalToString() {
+    HiveDecimal dec1 = HiveDecimal.create("0.0");
+    PrimitiveObjectInspector decOI_7_0 =
+        PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(new DecimalTypeInfo(7, 0));
+    PrimitiveObjectInspector decOI_7_1 =
+        PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(new DecimalTypeInfo(7, 1));
+    PrimitiveObjectInspector decOI_7_3 =
+        PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(new DecimalTypeInfo(7, 3));
+    assertEquals("0", PrimitiveObjectInspectorUtils.getString(dec1, decOI_7_0));
+    assertEquals("0.0", PrimitiveObjectInspectorUtils.getString(dec1, decOI_7_1));
+    assertEquals("0.000", PrimitiveObjectInspectorUtils.getString(dec1, decOI_7_3));
+  }
 }


[07/11] hive git commit: HIVE-20082: HiveDecimal to string conversion doesn't format the decimal correctly (Jason Dere, reviewed by Ashutosh Chauhan)

Posted by se...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/tez_union_multiinsert.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/tez_union_multiinsert.q.out b/ql/src/test/results/clientpositive/llap/tez_union_multiinsert.q.out
index aa9d790..04ae90a 100644
--- a/ql/src/test/results/clientpositive/llap/tez_union_multiinsert.q.out
+++ b/ql/src/test/results/clientpositive/llap/tez_union_multiinsert.q.out
@@ -155,7 +155,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
@@ -190,7 +190,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 316 Data size: 30336 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 316 Data size: 85952 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator
@@ -239,7 +239,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 1001 Data size: 280280 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: _col0 (type: string), _col1 (type: string), UDFToString(_col2) (type: string)
+                  expressions: _col0 (type: string), _col1 (type: string), CAST( _col2 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 1001 Data size: 456456 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator
@@ -1131,7 +1131,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 316 Data size: 30336 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 316 Data size: 85952 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator
@@ -1180,7 +1180,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 1001 Data size: 280280 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: _col0 (type: string), _col1 (type: string), UDFToString(_col2) (type: string)
+                  expressions: _col0 (type: string), _col1 (type: string), CAST( _col2 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 1001 Data size: 456456 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator
@@ -1228,7 +1228,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
@@ -2102,7 +2102,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 316 Data size: 30336 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 316 Data size: 85952 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator
@@ -2151,7 +2151,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 1001 Data size: 280280 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: _col0 (type: string), _col1 (type: string), UDFToString(_col2) (type: string)
+                  expressions: _col0 (type: string), _col1 (type: string), CAST( _col2 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 1001 Data size: 456456 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator
@@ -2199,7 +2199,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
@@ -3030,7 +3030,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
@@ -3065,7 +3065,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 24000 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 68000 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator
@@ -3114,7 +3114,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 501 Data size: 140280 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: _col0 (type: string), _col1 (type: string), UDFToString(_col2) (type: string)
+                  expressions: _col0 (type: string), _col1 (type: string), CAST( _col2 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 501 Data size: 228456 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator
@@ -3941,7 +3941,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
@@ -3980,7 +3980,7 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 250 Data size: 116000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
-                    expressions: _col0 (type: string), _col1 (type: string), UDFToString(_col2) (type: string)
+                    expressions: _col0 (type: string), _col1 (type: string), CAST( _col2 AS STRING) (type: string)
                     outputColumnNames: _col0, _col1, _col2
                     Statistics: Num rows: 250 Data size: 114000 Basic stats: COMPLETE Column stats: COMPLETE
                     File Output Operator
@@ -4014,7 +4014,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 125 Data size: 12000 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 125 Data size: 34000 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/union6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/union6.q.out b/ql/src/test/results/clientpositive/llap/union6.q.out
index da9f4a2..cfd20a8 100644
--- a/ql/src/test/results/clientpositive/llap/union6.q.out
+++ b/ql/src/test/results/clientpositive/llap/union6.q.out
@@ -93,7 +93,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out b/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out
index ff80132..47a0be9 100644
--- a/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out
@@ -1057,7 +1057,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1_n93' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1_n93' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 276 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
@@ -1096,7 +1096,7 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 250 Data size: 117000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
-                    expressions: _col0 (type: string), _col1 (type: string), UDFToString(_col2) (type: string)
+                    expressions: _col0 (type: string), _col1 (type: string), CAST( _col2 AS STRING) (type: string)
                     outputColumnNames: _col0, _col1, _col2
                     Statistics: Num rows: 250 Data size: 115000 Basic stats: COMPLETE Column stats: COMPLETE
                     File Output Operator
@@ -1130,7 +1130,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 125 Data size: 12500 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 125 Data size: 34500 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator
@@ -1981,7 +1981,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1_n93' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1_n93' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 276 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
@@ -2875,7 +2875,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1_n93' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1_n93' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 276 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
@@ -2903,7 +2903,7 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 25000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
-                    expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                    expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 250 Data size: 69000 Basic stats: COMPLETE Column stats: COMPLETE
                     File Output Operator
@@ -11230,7 +11230,7 @@ STAGE PLANS:
                         Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE
                         HybridGraceHashJoin: true
                         Select Operator
-                          expressions: UDFToDouble(UDFToLong(_col0)) (type: double), UDFToString(CAST( _col1 AS varchar(20))) (type: string)
+                          expressions: UDFToDouble(UDFToLong(_col0)) (type: double), CAST( CAST( _col1 AS varchar(20)) AS STRING) (type: string)
                           outputColumnNames: _col0, _col1
                           Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE
                           Group By Operator
@@ -11456,7 +11456,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 23750 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 67750 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
@@ -11617,7 +11617,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 23750 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 67750 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/unionDistinct_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/unionDistinct_3.q.out b/ql/src/test/results/clientpositive/llap/unionDistinct_3.q.out
index d3f774d..998d497 100644
--- a/ql/src/test/results/clientpositive/llap/unionDistinct_3.q.out
+++ b/ql/src/test/results/clientpositive/llap/unionDistinct_3.q.out
@@ -206,7 +206,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
@@ -1106,7 +1106,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
@@ -1260,7 +1260,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
@@ -1745,7 +1745,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
@@ -1799,7 +1799,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/vector_case_when_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_case_when_1.q.out b/ql/src/test/results/clientpositive/llap/vector_case_when_1.q.out
index ab083e7..b531d79 100644
--- a/ql/src/test/results/clientpositive/llap/vector_case_when_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_case_when_1.q.out
@@ -202,7 +202,7 @@ STAGE PLANS:
                   alias: lineitem_test
                   Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP      '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0.0D) END (type: double), if((UDFToString(l_shipinstruct) = 'D
 ELIVER IN PERSON'), null, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 10000), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 10000), null, datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date)
+                    expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP      '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0.0D) END (type: double), if((CAST( l_shipinstruct AS STRING) 
 = 'DELIVER IN PERSON'), null, l_tax) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 10000), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 10000), null, datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
                     Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
@@ -518,13 +518,13 @@ STAGE PLANS:
                       native: true
                       vectorizationSchemaColumns: [0:l_orderkey:int, 1:l_partkey:int, 2:l_suppkey:int, 3:l_linenumber:int, 4:l_quantity:int, 5:l_extendedprice:double, 6:l_discount:double, 7:l_tax:decimal(10,2)/DECIMAL_64, 8:l_returnflag:char(1), 9:l_linestatus:char(1), 10:l_shipdate:date, 11:l_commitdate:date, 12:l_receiptdate:date, 13:l_shipinstruct:varchar(20), 14:l_shipmode:char(10), 15:l_comment:string, 16:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
                   Select Operator
-                    expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP      '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0.0D) END (type: double), if((UDFToString(l_shipinstruct) = 'D
 ELIVER IN PERSON'), null, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 10000), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 10000), null, datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date)
+                    expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP      '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0.0D) END (type: double), if((CAST( l_shipinstruct AS STRING) 
 = 'DELIVER IN PERSON'), null, l_tax) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 10000), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 10000), null, datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
                     Select Vectorization:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [4, 22, 24, 25, 26, 27, 28, 30, 31, 32, 33, 34, 35, 38, 40, 43, 44]
-                        selectExpressions: IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 21:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 22:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 21:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprStringScalarStringScalar(col 20:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean) -> 21:string) -> 22:string) -> 21:string) -> 22:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 24:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean
 , val Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprColumnNull(col 20:boolean, col 21:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean, ConstantVectorExpression(val Many) -> 21:string) -> 23:string) -> 24:string) -> 23:string) -> 24:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprNullNull(null, null) -> 23:string) -> 25:string) -> 23:string) -> 25:string, IfExprLongColumnLongColumn(col 17:boolean, col 18:date, col 19:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 17:boolean, VectorUDFDateAddColSca
 lar(col 10:date, val 10) -> 18:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 19:date) -> 26:date, IfExprDoubleColumnLongScalar(col 17:boolean, col 28:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 27:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 27:double) -> 28:double) -> 27:double, IfExprDoubleColumnDoubleScalar(col 17:boolean, col 29:double, val 0.0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 28:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 28:double) -> 29:double) -> 28:double, IfExprNullColumn(col 17:boolean, null, col 46)(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 17:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_
 64) -> 46:decimal(10,2)) -> 30:decimal(10,2), IfExprColumnNull(col 18:boolean, col 47:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 18:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 47:decimal(10,2)) -> 31:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 32:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 33:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: 
 StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 34:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 35:decimal(10,2), IfExprTimestampColumnColumn(col 19:boolean, col 36:timestampcol 37:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 19:boolean, CastDateToTimestamp(col 12:date) -> 36:timestamp, CastDateToTimestamp(col 11:date) -> 37:timestamp) -> 38:timestamp, IfExprColumnNull(col 19:boolean, col 39:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 19:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 39:int) -> 40:int, IfExprNullColumn(col 41:boolean, null, col 42)(children: LongColGreaterLongScalar(col 2:int, val 1
 0000) -> 41:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 42:int) -> 43:int, IfExprLongScalarLongScalar(col 45:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 44:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 44:int) -> 45:boolean) -> 44:date
+                        selectExpressions: IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 21:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 22:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 21:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprStringScalarStringScalar(col 20:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean) -> 21:string) -> 22:string) -> 21:string) -> 22:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 24:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean
 , val Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprColumnNull(col 20:boolean, col 21:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean, ConstantVectorExpression(val Many) -> 21:string) -> 23:string) -> 24:string) -> 23:string) -> 24:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprNullNull(null, null) -> 23:string) -> 25:string) -> 23:string) -> 25:string, IfExprLongColumnLongColumn(col 17:boolean, col 18:date, col 19:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 17:boolean, VectorUDFDateAddColSca
 lar(col 10:date, val 10) -> 18:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 19:date) -> 26:date, IfExprDoubleColumnLongScalar(col 17:boolean, col 28:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 27:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 27:double) -> 28:double) -> 27:double, IfExprDoubleColumnDoubleScalar(col 17:boolean, col 29:double, val 0.0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 28:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 28:double) -> 29:double) -> 28:double, IfExprNullColumn(col 17:boolean, null, col 46)(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 17:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_
 64) -> 46:decimal(10,2)) -> 30:decimal(10,2), IfExprColumnNull(col 18:boolean, col 47:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 18:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 47:decimal(10,2)) -> 31:decimal(10,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 32:decimal(12,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 33:decimal(12,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax)
 )(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 34:decimal(10,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 35:decimal(10,2), IfExprTimestampColumnColumn(col 19:boolean, col 36:timestampcol 37:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 19:boolean, CastDateToTimestamp(col 12:date) -> 36:timestamp, CastDateToTimestamp(col 11:date) -> 37:timestamp) -> 38:timestamp, IfExprColumnNull(col 19:boolean, col 39:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 19:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 39:int) -> 40:int, IfExprNullColumn(col 41:boolean, null, col 42)(children: LongColGreaterLongScalar(
 col 2:int, val 10000) -> 41:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 42:int) -> 43:int, IfExprLongScalarLongScalar(col 45:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 44:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 44:int) -> 45:boolean) -> 44:date
                     Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false
@@ -851,13 +851,13 @@ STAGE PLANS:
                       native: true
                       vectorizationSchemaColumns: [0:l_orderkey:int, 1:l_partkey:int, 2:l_suppkey:int, 3:l_linenumber:int, 4:l_quantity:int, 5:l_extendedprice:double, 6:l_discount:double, 7:l_tax:decimal(10,2)/DECIMAL_64, 8:l_returnflag:char(1), 9:l_linestatus:char(1), 10:l_shipdate:date, 11:l_commitdate:date, 12:l_receiptdate:date, 13:l_shipinstruct:varchar(20), 14:l_shipmode:char(10), 15:l_comment:string, 16:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
                   Select Operator
-                    expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP      '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0.0D) END (type: double), if((UDFToString(l_shipinstruct) = 'D
 ELIVER IN PERSON'), null, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 10000), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 10000), null, datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date)
+                    expressions: l_quantity (type: int), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE ('Huge number') END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN ('Many') ELSE (null) END (type: string), CASE WHEN ((l_quantity = 1)) THEN ('Single') WHEN ((l_quantity = 2)) THEN ('Two') WHEN ((l_quantity < 10)) THEN ('Some') WHEN ((l_quantity < 100)) THEN (null) ELSE (null) END (type: string), if((l_shipmode = 'SHIP      '), date_add(l_shipdate, 10), date_add(l_shipdate, 5)) (type: date), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0) END (type: double), CASE WHEN ((l_returnflag = 'N')) THEN ((l_extendedprice * (1.0D - l_discount))) ELSE (0.0D) END (type: double), if((CAST( l_shipinstruct AS STRING) 
 = 'DELIVER IN PERSON'), null, l_tax) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, null) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(12,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(12,2)), if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax) (type: decimal(10,2)), if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0) (type: decimal(10,2)), if((l_partkey > 30), CAST( l_receiptdate AS TIMESTAMP), CAST( l_commitdate AS TIMESTAMP)) (type: timestamp), if((l_suppkey > 10000), datediff(l_receiptdate, l_commitdate), null) (type: int), if((l_suppkey > 10000), null, datediff(l_receiptdate, l_commitdate)) (type: int), if(((l_suppkey % 500) > 100), DATE'2009-01-01', DATE'2009-12-31') (type: date)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
                     Select Vectorization:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [4, 27, 38, 48, 52, 54, 60, 63, 65, 67, 68, 69, 70, 73, 76, 79, 80]
-                        selectExpressions: IfExprColumnCondExpr(col 17:boolean, col 18:stringcol 26:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, ConstantVectorExpression(val Single) -> 18:string, IfExprColumnCondExpr(col 19:boolean, col 20:stringcol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 19:boolean, ConstantVectorExpression(val Two) -> 20:string, IfExprColumnCondExpr(col 21:boolean, col 22:stringcol 24:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 21:boolean, ConstantVectorExpression(val Some) -> 22:string, IfExprStringScalarStringScalar(col 23:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 23:boolean) -> 24:string) -> 25:string) -> 26:string) -> 27:string, IfExprColumnCondExpr(col 23:boolean, col 28:stringcol 37:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 23:boolean, ConstantVectorExpression(val Single) -> 28:string, IfExprColumnCondExpr(col 29:boolea
 n, col 30:stringcol 36:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 29:boolean, ConstantVectorExpression(val Two) -> 30:string, IfExprColumnCondExpr(col 31:boolean, col 32:stringcol 35:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 31:boolean, ConstantVectorExpression(val Some) -> 32:string, IfExprColumnNull(col 33:boolean, col 34:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 33:boolean, ConstantVectorExpression(val Many) -> 34:string) -> 35:string) -> 36:string) -> 37:string) -> 38:string, IfExprColumnCondExpr(col 39:boolean, col 40:stringcol 47:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 39:boolean, ConstantVectorExpression(val Single) -> 40:string, IfExprColumnCondExpr(col 41:boolean, col 42:stringcol 46:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 41:boolean, ConstantVectorExpression(val Two) -> 42:string, IfExprColumnCondExpr(col 43:boolean, col 44:stringcol 45:string)(children: LongColLes
 sLongScalar(col 4:int, val 10) -> 43:boolean, ConstantVectorExpression(val Some) -> 44:string, IfExprNullNull(null, null) -> 45:string) -> 46:string) -> 47:string) -> 48:string, IfExprCondExprCondExpr(col 49:boolean, col 50:datecol 51:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 49:boolean, VectorUDFDateAddColScalar(col 10:date, val 10) -> 50:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 51:date) -> 52:date, IfExprDoubleColumnLongScalar(col 57:boolean, col 58:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 54:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 54:double) -> 58:double) -> 54:double, IfExprCondExprColumn(col 57:boolean, col 59:double, col 58:double)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 58:double)(children: DoubleScalarSubtractD
 oubleColumn(val 1.0, col 6:double) -> 58:double) -> 59:double, ConstantVectorExpression(val 0.0) -> 58:double) -> 60:double, IfExprNullColumn(col 62:boolean, null, col 82)(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 62:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 82:decimal(10,2)) -> 63:decimal(10,2), IfExprColumnNull(col 64:boolean, col 83:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 64:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 83:decimal(10,2)) -> 65:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:b
 oolean) -> 67:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 68:decimal(12,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 69:decimal(10,2), VectorUDFAdaptor(if((UDFToString(l_shipinstruct) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 70:decimal(10,2), IfExprCondExprCondExpr(col 66:boolean, col 71:timestampcol 72:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 66:boolean, CastDateToTimestamp(col 12
 :date) -> 71:timestamp, CastDateToTimestamp(col 11:date) -> 72:timestamp) -> 73:timestamp, IfExprCondExprNull(col 74:boolean, col 75:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 74:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 75:int) -> 76:int, IfExprNullCondExpr(col 77:boolean, null, col 78:int)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 77:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 78:int) -> 79:int, IfExprLongScalarLongScalar(col 81:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 80:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 80:int) -> 81:boolean) -> 80:date
+                        selectExpressions: IfExprColumnCondExpr(col 17:boolean, col 18:stringcol 26:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, ConstantVectorExpression(val Single) -> 18:string, IfExprColumnCondExpr(col 19:boolean, col 20:stringcol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 19:boolean, ConstantVectorExpression(val Two) -> 20:string, IfExprColumnCondExpr(col 21:boolean, col 22:stringcol 24:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 21:boolean, ConstantVectorExpression(val Some) -> 22:string, IfExprStringScalarStringScalar(col 23:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 23:boolean) -> 24:string) -> 25:string) -> 26:string) -> 27:string, IfExprColumnCondExpr(col 23:boolean, col 28:stringcol 37:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 23:boolean, ConstantVectorExpression(val Single) -> 28:string, IfExprColumnCondExpr(col 29:boolea
 n, col 30:stringcol 36:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 29:boolean, ConstantVectorExpression(val Two) -> 30:string, IfExprColumnCondExpr(col 31:boolean, col 32:stringcol 35:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 31:boolean, ConstantVectorExpression(val Some) -> 32:string, IfExprColumnNull(col 33:boolean, col 34:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 33:boolean, ConstantVectorExpression(val Many) -> 34:string) -> 35:string) -> 36:string) -> 37:string) -> 38:string, IfExprColumnCondExpr(col 39:boolean, col 40:stringcol 47:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 39:boolean, ConstantVectorExpression(val Single) -> 40:string, IfExprColumnCondExpr(col 41:boolean, col 42:stringcol 46:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 41:boolean, ConstantVectorExpression(val Two) -> 42:string, IfExprColumnCondExpr(col 43:boolean, col 44:stringcol 45:string)(children: LongColLes
 sLongScalar(col 4:int, val 10) -> 43:boolean, ConstantVectorExpression(val Some) -> 44:string, IfExprNullNull(null, null) -> 45:string) -> 46:string) -> 47:string) -> 48:string, IfExprCondExprCondExpr(col 49:boolean, col 50:datecol 51:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 49:boolean, VectorUDFDateAddColScalar(col 10:date, val 10) -> 50:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 51:date) -> 52:date, IfExprDoubleColumnLongScalar(col 57:boolean, col 58:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 54:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 54:double) -> 58:double) -> 54:double, IfExprCondExprColumn(col 57:boolean, col 59:double, col 58:double)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 58:double)(children: DoubleScalarSubtractD
 oubleColumn(val 1.0, col 6:double) -> 58:double) -> 59:double, ConstantVectorExpression(val 0.0) -> 58:double) -> 60:double, IfExprNullColumn(col 62:boolean, null, col 82)(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 62:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 82:decimal(10,2)) -> 63:decimal(10,2), IfExprColumnNull(col 64:boolean, col 83:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 64:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 83:decimal(10,2)) -> 65:decimal(10,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 
 66:boolean) -> 67:decimal(12,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 68:decimal(12,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 69:decimal(10,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 70:decimal(10,2), IfExprCondExprCondExpr(col 66:boolean, col 71:timestampcol 72:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 66:boolean, CastDateTo
 Timestamp(col 12:date) -> 71:timestamp, CastDateToTimestamp(col 11:date) -> 72:timestamp) -> 73:timestamp, IfExprCondExprNull(col 74:boolean, col 75:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 74:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 75:int) -> 76:int, IfExprNullCondExpr(col 77:boolean, null, col 78:int)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 77:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 78:int) -> 79:int, IfExprLongScalarLongScalar(col 81:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 80:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 80:int) -> 81:boolean) -> 80:date
                     Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out b/ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out
index 7fe7af7..31b3807 100644
--- a/ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out
@@ -509,9 +509,9 @@ STAGE PLANS:
                           projectedOutputColumnNums: [0, 1]
                       Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
-                        key expressions: UDFToString(_col1) (type: string)
+                        key expressions: CAST( _col1 AS STRING) (type: string)
                         sort order: +
-                        Map-reduce partition columns: UDFToString(_col1) (type: string)
+                        Map-reduce partition columns: CAST( _col1 AS STRING) (type: string)
                         Reduce Sink Vectorization:
                             className: VectorReduceSinkStringOperator
                             keyExpressions: CastStringGroupToString(col 1:char(10)) -> 3:string
@@ -557,7 +557,7 @@ STAGE PLANS:
                         condition map:
                              Inner Join 0 to 1
                         keys:
-                          0 UDFToString(_col1) (type: string)
+                          0 CAST( _col1 AS STRING) (type: string)
                           1 _col1 (type: string)
                         Map Join Vectorization:
                             className: VectorMapJoinInnerStringOperator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out
index 8d8cbf5..fd934f0 100644
--- a/ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out
@@ -893,7 +893,7 @@ STAGE PLANS:
                       native: true
                       vectorizationSchemaColumns: [0:t:decimal(4,2)/DECIMAL_64, 1:u:decimal(5,0)/DECIMAL_64, 2:v:decimal(10,0)/DECIMAL_64, 3:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
                   Select Operator
-                    expressions: UDFToString(t) (type: string)
+                    expressions: CAST( t AS STRING) (type: string)
                     outputColumnNames: _col0
                     Select Vectorization:
                         className: VectorSelectOperator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out
index 8cd753c..24873a4 100644
--- a/ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out
@@ -864,7 +864,7 @@ STAGE PLANS:
                       native: true
                       vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
                   Select Operator
-                    expressions: UDFToString(t) (type: string)
+                    expressions: CAST( t AS STRING) (type: string)
                     outputColumnNames: _col0
                     Select Vectorization:
                         className: VectorSelectOperator
@@ -948,7 +948,7 @@ POSTHOOK: query: select cast(t as string) from decimal_2 order by t
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_2
 #### A masked pattern was here ####
-17.29
+17.290000000
 PREHOOK: query: insert overwrite table decimal_2
   select cast('3404045.5044003' as decimal(18,9)) from src tablesample (1 rows)
 PREHOOK: type: QUERY
@@ -1803,7 +1803,7 @@ STAGE PLANS:
                       native: true
                       vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
                   Select Operator
-                    expressions: UDFToString(t) (type: string)
+                    expressions: CAST( t AS STRING) (type: string)
                     outputColumnNames: _col0
                     Select Vectorization:
                         className: VectorSelectOperator
@@ -1887,7 +1887,7 @@ POSTHOOK: query: select cast(t as string) from decimal_2 order by t
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_2
 #### A masked pattern was here ####
-3404045.5044003
+3404045.504400300
 PREHOOK: query: explain vectorization detail
 select cast(3.14 as decimal(4,2)) as c from decimal_2 order by c
 PREHOOK: type: QUERY
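
The two value changes above (17.29 becoming 17.290000000, and 3404045.5044003 becoming 3404045.504400300) reflect the updated decimal-to-string cast behavior picked up by this merge (CastDecimalToString is among the touched files): CAST(decimal AS STRING) now prints the value padded to the declared scale of the column instead of trimming trailing zeros. A minimal sketch of the effect, assuming the decimal_2 table above with its single column t decimal(18,9):

    SELECT CAST(t AS STRING) FROM decimal_2;
    -- previous golden output: 17.29
    -- updated golden output:  17.290000000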

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out
index 190bcd5..a7b2714 100644
--- a/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out
@@ -71,7 +71,7 @@ STAGE PLANS:
                     predicate: ((cdecimal1 < 12345.5678) and (cdecimal1 > 0) and (cdecimal2 <> 0) and (cdecimal2 > 1000) and cdouble is not null) (type: boolean)
                     Statistics: Num rows: 455 Data size: 100294 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: (cdecimal1 + cdecimal2) (type: decimal(25,14)), (cdecimal1 - (2 * cdecimal2)) (type: decimal(26,14)), ((cdecimal1 + 2.34) / cdecimal2) (type: decimal(38,13)), (cdecimal1 * (cdecimal2 / 3.4)) (type: decimal(38,17)), (cdecimal1 % 10) (type: decimal(12,10)), UDFToInteger(cdecimal1) (type: int), UDFToShort(cdecimal2) (type: smallint), UDFToByte(cdecimal2) (type: tinyint), UDFToLong(cdecimal1) (type: bigint), UDFToBoolean(cdecimal1) (type: boolean), UDFToDouble(cdecimal2) (type: double), UDFToFloat(cdecimal1) (type: float), UDFToString(cdecimal2) (type: string), CAST( cdecimal1 AS TIMESTAMP) (type: timestamp)
+                      expressions: (cdecimal1 + cdecimal2) (type: decimal(25,14)), (cdecimal1 - (2 * cdecimal2)) (type: decimal(26,14)), ((cdecimal1 + 2.34) / cdecimal2) (type: decimal(38,13)), (cdecimal1 * (cdecimal2 / 3.4)) (type: decimal(38,17)), (cdecimal1 % 10) (type: decimal(12,10)), UDFToInteger(cdecimal1) (type: int), UDFToShort(cdecimal2) (type: smallint), UDFToByte(cdecimal2) (type: tinyint), UDFToLong(cdecimal1) (type: bigint), UDFToBoolean(cdecimal1) (type: boolean), UDFToDouble(cdecimal2) (type: double), UDFToFloat(cdecimal1) (type: float), CAST( cdecimal2 AS STRING) (type: string), CAST( cdecimal1 AS TIMESTAMP) (type: timestamp)
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
                       Select Vectorization:
                           className: VectorSelectOperator
@@ -166,16 +166,16 @@ LIMIT 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_test_n1
 #### A masked pattern was here ####
-1836.44199584197700	-1166.02723492725400	0.8372697814834	245972.55810810255804469	5.6189189189	835	1000	NULL	835	true	1000.823076923077	835.6189	1000.823076923077	1970-01-01 00:13:55.618918918
-1856.13222453224620	-1178.52931392929240	0.8372449787014	251275.44324324968747899	4.5783783784	844	1011	NULL	844	true	1011.5538461538462	844.57837	1011.5538461538462	1970-01-01 00:14:04.578378378
-1858.75758835761550	-1180.19625779623100	0.8372417113669	251986.76756757564861519	5.7729729730	845	1012	NULL	845	true	1012.9846153846155	845.77295	1012.9846153846155	1970-01-01 00:14:05.772972973
-1862.69563409566930	-1182.69667359663860	0.8372368276345	253055.63918919969667286	7.5648648649	847	1015	NULL	847	true	1015.1307692307693	847.5649	1015.1307692307693	1970-01-01 00:14:07.564864864
-1883.69854469852330	-1196.03222453224660	0.8372111259286	258794.49324323677116559	7.1216216216	857	1026	NULL	857	true	1026.5769230769233	857.12164	1026.5769230769233	1970-01-01 00:14:17.121621621
-1886.32390852389240	-1197.69916839918480	0.8372079534582	259516.37432431944456816	8.3162162162	858	1028	NULL	858	true	1028.0076923076924	858.3162	1028.0076923076924	1970-01-01 00:14:18.316216216
-1887.63659043657700	-1198.53264033265400	0.8372063705322	259877.69189188782259834	8.9135135135	858	1028	NULL	858	true	1028.723076923077	858.9135	1028.723076923077	1970-01-01 00:14:18.913513513
-1895.51268191268460	-1203.53347193346920	0.8371969190171	262050.87567567649292835	2.4972972973	862	1033	NULL	862	true	1033.0153846153846	862.4973	1033.0153846153846	1970-01-01 00:14:22.497297297
-1909.95218295221550	-1212.70166320163100	0.8371797936946	266058.54729730725574014	9.0675675676	869	1040	NULL	869	true	1040.8846153846155	869.06757	1040.8846153846155	1970-01-01 00:14:29.067567567
-1913.89022869026920	-1215.20207900203840	0.8371751679996	267156.82702703945592392	0.8594594595	870	1043	NULL	870	true	1043.0307692307692	870.85944	1043.0307692307692	1970-01-01 00:14:30.859459459
+1836.44199584197700	-1166.02723492725400	0.8372697814834	245972.55810810255804469	5.6189189189	835	1000	NULL	835	true	1000.823076923077	835.6189	1000.82307692307700	1970-01-01 00:13:55.618918918
+1856.13222453224620	-1178.52931392929240	0.8372449787014	251275.44324324968747899	4.5783783784	844	1011	NULL	844	true	1011.5538461538462	844.57837	1011.55384615384620	1970-01-01 00:14:04.578378378
+1858.75758835761550	-1180.19625779623100	0.8372417113669	251986.76756757564861519	5.7729729730	845	1012	NULL	845	true	1012.9846153846155	845.77295	1012.98461538461550	1970-01-01 00:14:05.772972973
+1862.69563409566930	-1182.69667359663860	0.8372368276345	253055.63918919969667286	7.5648648649	847	1015	NULL	847	true	1015.1307692307693	847.5649	1015.13076923076930	1970-01-01 00:14:07.564864864
+1883.69854469852330	-1196.03222453224660	0.8372111259286	258794.49324323677116559	7.1216216216	857	1026	NULL	857	true	1026.5769230769233	857.12164	1026.57692307692330	1970-01-01 00:14:17.121621621
+1886.32390852389240	-1197.69916839918480	0.8372079534582	259516.37432431944456816	8.3162162162	858	1028	NULL	858	true	1028.0076923076924	858.3162	1028.00769230769240	1970-01-01 00:14:18.316216216
+1887.63659043657700	-1198.53264033265400	0.8372063705322	259877.69189188782259834	8.9135135135	858	1028	NULL	858	true	1028.723076923077	858.9135	1028.72307692307700	1970-01-01 00:14:18.913513513
+1895.51268191268460	-1203.53347193346920	0.8371969190171	262050.87567567649292835	2.4972972973	862	1033	NULL	862	true	1033.0153846153846	862.4973	1033.01538461538460	1970-01-01 00:14:22.497297297
+1909.95218295221550	-1212.70166320163100	0.8371797936946	266058.54729730725574014	9.0675675676	869	1040	NULL	869	true	1040.8846153846155	869.06757	1040.88461538461550	1970-01-01 00:14:29.067567567
+1913.89022869026920	-1215.20207900203840	0.8371751679996	267156.82702703945592392	0.8594594595	870	1043	NULL	870	true	1043.0307692307692	870.85944	1043.03076923076920	1970-01-01 00:14:30.859459459
 PREHOOK: query: SELECT SUM(HASH(*))
 FROM (SELECT cdecimal1 + cdecimal2 as c1, cdecimal1 - (2*cdecimal2) as c2, ((cdecimal1+2.34)/cdecimal2) as c3, (cdecimal1 * (cdecimal2/3.4)) as c4, cdecimal1 % 10 as c5, CAST(cdecimal1 AS INT) as c6, CAST(cdecimal2 AS SMALLINT) as c7, CAST(cdecimal2 AS TINYINT) as c8, CAST(cdecimal1 AS BIGINT) as c9, CAST (cdecimal1 AS BOOLEAN) as c10, CAST(cdecimal2 AS DOUBLE) as c11, CAST(cdecimal1 AS FLOAT) as c12, CAST(cdecimal2 AS STRING) as c13, CAST(cdecimal1 AS TIMESTAMP) as c14 FROM decimal_test_n1 WHERE cdecimal1 > 0 AND cdecimal1 < 12345.5678 AND cdecimal2 != 0 AND cdecimal2 > 1000 AND cdouble IS NOT NULL
 ORDER BY c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14) q
@@ -188,7 +188,7 @@ ORDER BY c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14) q
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_test_n1
 #### A masked pattern was here ####
--1300490595129
+1506342314829
 PREHOOK: query: CREATE TABLE decimal_test_small_n0 STORED AS ORC AS SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(10,3)) AS cdecimal1, CAST (((cdouble*9.3)/13) AS DECIMAL(7,2)) AS cdecimal2 FROM alltypesorc
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
@@ -245,7 +245,7 @@ STAGE PLANS:
                     predicate: ((cdecimal1 < 12345.5678) and (cdecimal1 > 0) and (cdecimal2 <> 0) and (cdecimal2 > 1000) and cdouble is not null) (type: boolean)
                     Statistics: Num rows: 455 Data size: 100294 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: (cdecimal1 + cdecimal2) (type: decimal(11,3)), (cdecimal1 - (2 * cdecimal2)) (type: decimal(11,3)), ((cdecimal1 + 2.34) / cdecimal2) (type: decimal(21,11)), (cdecimal1 * (cdecimal2 / 3.4)) (type: decimal(23,9)), (cdecimal1 % 10) (type: decimal(5,3)), UDFToInteger(cdecimal1) (type: int), UDFToShort(cdecimal2) (type: smallint), UDFToByte(cdecimal2) (type: tinyint), UDFToLong(cdecimal1) (type: bigint), UDFToBoolean(cdecimal1) (type: boolean), UDFToDouble(cdecimal2) (type: double), UDFToFloat(cdecimal1) (type: float), UDFToString(cdecimal2) (type: string), CAST( cdecimal1 AS TIMESTAMP) (type: timestamp)
+                      expressions: (cdecimal1 + cdecimal2) (type: decimal(11,3)), (cdecimal1 - (2 * cdecimal2)) (type: decimal(11,3)), ((cdecimal1 + 2.34) / cdecimal2) (type: decimal(21,11)), (cdecimal1 * (cdecimal2 / 3.4)) (type: decimal(23,9)), (cdecimal1 % 10) (type: decimal(5,3)), UDFToInteger(cdecimal1) (type: int), UDFToShort(cdecimal2) (type: smallint), UDFToByte(cdecimal2) (type: tinyint), UDFToLong(cdecimal1) (type: bigint), UDFToBoolean(cdecimal1) (type: boolean), UDFToDouble(cdecimal2) (type: double), UDFToFloat(cdecimal1) (type: float), CAST( cdecimal2 AS STRING) (type: string), CAST( cdecimal1 AS TIMESTAMP) (type: timestamp)
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
                       Select Vectorization:
                           className: VectorSelectOperator
@@ -362,4 +362,4 @@ ORDER BY c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14) q
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_test_small_n0
 #### A masked pattern was here ####
-1273824888155
+1252336297085
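
The SUM(HASH(*)) checksums above change for the same reason: column c13 in these queries is CAST(cdecimal2 AS STRING), and its string values now carry trailing zeros (for example 1000.823076923077 becomes 1000.82307692307700 in the rows listed earlier), so the per-row hashes, and with them the aggregated sums, differ.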

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/vector_string_concat.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_string_concat.q.out b/ql/src/test/results/clientpositive/llap/vector_string_concat.q.out
index c6b3dcc..a4f32f1 100644
--- a/ql/src/test/results/clientpositive/llap/vector_string_concat.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_string_concat.q.out
@@ -344,7 +344,7 @@ STAGE PLANS:
                   TableScan Vectorization:
                       native: true
                   Select Operator
-                    expressions: concat(concat(concat('Quarter ', UDFToString(UDFToInteger(((UDFToDouble((month(dt) - 1)) / 3.0D) + 1.0D)))), '-'), UDFToString(year(dt))) (type: string)
+                    expressions: concat(concat(concat('Quarter ', CAST( UDFToInteger(((UDFToDouble((month(dt) - 1)) / 3.0D) + 1.0D)) AS STRING)), '-'), CAST( year(dt) AS STRING)) (type: string)
                     outputColumnNames: _col0
                     Select Vectorization:
                         className: VectorSelectOperator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/vector_udf1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_udf1.q.out b/ql/src/test/results/clientpositive/llap/vector_udf1.q.out
index aef23fd..16b5948 100644
--- a/ql/src/test/results/clientpositive/llap/vector_udf1.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_udf1.q.out
@@ -66,7 +66,7 @@ STAGE PLANS:
                       native: true
                       vectorizationSchemaColumns: [0:c1:string, 1:c2:string, 2:c3:varchar(10), 3:c4:varchar(20), 4:d1:string, 5:d2:string, 6:d3:varchar(10), 7:d4:varchar(10), 8:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
                   Select Operator
-                    expressions: concat(c1, c2) (type: string), concat(c3, c4) (type: varchar(30)), (concat(c1, c2) = UDFToString(concat(c3, c4))) (type: boolean)
+                    expressions: concat(c1, c2) (type: string), concat(c3, c4) (type: varchar(30)), (concat(c1, c2) = CAST( concat(c3, c4) AS STRING)) (type: boolean)
                     outputColumnNames: _col0, _col1, _col2
                     Select Vectorization:
                         className: VectorSelectOperator
@@ -167,7 +167,7 @@ STAGE PLANS:
                       native: true
                       vectorizationSchemaColumns: [0:c1:string, 1:c2:string, 2:c3:varchar(10), 3:c4:varchar(20), 4:d1:string, 5:d2:string, 6:d3:varchar(10), 7:d4:varchar(10), 8:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
                   Select Operator
-                    expressions: upper(c2) (type: string), upper(c4) (type: varchar(20)), (upper(c2) = UDFToString(upper(c4))) (type: boolean)
+                    expressions: upper(c2) (type: string), upper(c4) (type: varchar(20)), (upper(c2) = CAST( upper(c4) AS STRING)) (type: boolean)
                     outputColumnNames: _col0, _col1, _col2
                     Select Vectorization:
                         className: VectorSelectOperator
@@ -268,7 +268,7 @@ STAGE PLANS:
                       native: true
                       vectorizationSchemaColumns: [0:c1:string, 1:c2:string, 2:c3:varchar(10), 3:c4:varchar(20), 4:d1:string, 5:d2:string, 6:d3:varchar(10), 7:d4:varchar(10), 8:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
                   Select Operator
-                    expressions: lower(c2) (type: string), lower(c4) (type: varchar(20)), (lower(c2) = UDFToString(lower(c4))) (type: boolean)
+                    expressions: lower(c2) (type: string), lower(c4) (type: varchar(20)), (lower(c2) = CAST( lower(c4) AS STRING)) (type: boolean)
                     outputColumnNames: _col0, _col1, _col2
                     Select Vectorization:
                         className: VectorSelectOperator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/vector_varchar_mapjoin1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_varchar_mapjoin1.q.out b/ql/src/test/results/clientpositive/llap/vector_varchar_mapjoin1.q.out
index 7270d75..4d9d865 100644
--- a/ql/src/test/results/clientpositive/llap/vector_varchar_mapjoin1.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_varchar_mapjoin1.q.out
@@ -417,9 +417,9 @@ STAGE PLANS:
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
-                        key expressions: UDFToString(_col1) (type: string)
+                        key expressions: CAST( _col1 AS STRING) (type: string)
                         sort order: +
-                        Map-reduce partition columns: UDFToString(_col1) (type: string)
+                        Map-reduce partition columns: CAST( _col1 AS STRING) (type: string)
                         Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: int), _col1 (type: varchar(10))
             Execution mode: vectorized, llap
@@ -450,7 +450,7 @@ STAGE PLANS:
                         condition map:
                              Inner Join 0 to 1
                         keys:
-                          0 UDFToString(_col1) (type: string)
+                          0 CAST( _col1 AS STRING) (type: string)
                           1 _col1 (type: string)
                         outputColumnNames: _col0, _col1, _col2, _col3
                         input vertices:

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/vectorized_casts.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorized_casts.q.out b/ql/src/test/results/clientpositive/llap/vectorized_casts.q.out
index 8fecb02..e7f774b 100644
--- a/ql/src/test/results/clientpositive/llap/vectorized_casts.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorized_casts.q.out
@@ -178,7 +178,7 @@ STAGE PLANS:
                     predicate: ((cbigint % 250) = 0) (type: boolean)
                     Statistics: Num rows: 6144 Data size: 842180 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
-                      expressions: UDFToBoolean(ctinyint) (type: boolean), UDFToBoolean(csmallint) (type: boolean), UDFToBoolean(cint) (type: boolean), UDFToBoolean(cbigint) (type: boolean), UDFToBoolean(cfloat) (type: boolean), UDFToBoolean(cdouble) (type: boolean), cboolean1 (type: boolean), UDFToBoolean((cbigint * 0L)) (type: boolean), UDFToBoolean(ctimestamp1) (type: boolean), UDFToBoolean(cstring1) (type: boolean), UDFToInteger(ctinyint) (type: int), UDFToInteger(csmallint) (type: int), cint (type: int), UDFToInteger(cbigint) (type: int), UDFToInteger(cfloat) (type: int), UDFToInteger(cdouble) (type: int), UDFToInteger(cboolean1) (type: int), UDFToInteger(ctimestamp1) (type: int), UDFToInteger(cstring1) (type: int), UDFToInteger(substr(cstring1, 1, 1)) (type: int), UDFToByte(cfloat) (type: tinyint), UDFToShort(cfloat) (type: smallint), UDFToLong(cfloat) (type: bigint), UDFToDouble(ctinyint) (type: double), UDFToDouble(csmallint) (type: double), UDFToDouble(cint) (type: double),
  UDFToDouble(cbigint) (type: double), UDFToDouble(cfloat) (type: double), cdouble (type: double), UDFToDouble(cboolean1) (type: double), UDFToDouble(ctimestamp1) (type: double), UDFToDouble(cstring1) (type: double), UDFToDouble(substr(cstring1, 1, 1)) (type: double), UDFToFloat(cint) (type: float), UDFToFloat(cdouble) (type: float), CAST( ctinyint AS TIMESTAMP) (type: timestamp), CAST( csmallint AS TIMESTAMP) (type: timestamp), CAST( cint AS TIMESTAMP) (type: timestamp), CAST( cbigint AS TIMESTAMP) (type: timestamp), CAST( cfloat AS TIMESTAMP) (type: timestamp), CAST( cdouble AS TIMESTAMP) (type: timestamp), CAST( cboolean1 AS TIMESTAMP) (type: timestamp), CAST( (cbigint * 0L) AS TIMESTAMP) (type: timestamp), CAST( CAST( ctimestamp1 AS DATE) AS TIMESTAMP) (type: timestamp), ctimestamp1 (type: timestamp), CAST( cstring1 AS TIMESTAMP) (type: timestamp), CAST( substr(cstring1, 1, 1) AS TIMESTAMP) (type: timestamp), UDFToString(ctinyint) (type: string), UDFToString(csmallint) (type: str
 ing), UDFToString(cint) (type: string), UDFToString(cbigint) (type: string), UDFToString(cfloat) (type: string), UDFToString(cdouble) (type: string), UDFToString(cboolean1) (type: string), UDFToString((cbigint * 0L)) (type: string), UDFToString(ctimestamp1) (type: string), cstring1 (type: string), UDFToString(CAST( cstring1 AS CHAR(10))) (type: string), UDFToString(CAST( cstring1 AS varchar(10))) (type: string), UDFToFloat(UDFToInteger(cfloat)) (type: float), UDFToDouble((cint * 2)) (type: double), UDFToString(sin(cfloat)) (type: string), (UDFToDouble(UDFToFloat(cint)) + UDFToDouble(cboolean1)) (type: double)
+                      expressions: UDFToBoolean(ctinyint) (type: boolean), UDFToBoolean(csmallint) (type: boolean), UDFToBoolean(cint) (type: boolean), UDFToBoolean(cbigint) (type: boolean), UDFToBoolean(cfloat) (type: boolean), UDFToBoolean(cdouble) (type: boolean), cboolean1 (type: boolean), UDFToBoolean((cbigint * 0L)) (type: boolean), UDFToBoolean(ctimestamp1) (type: boolean), UDFToBoolean(cstring1) (type: boolean), UDFToInteger(ctinyint) (type: int), UDFToInteger(csmallint) (type: int), cint (type: int), UDFToInteger(cbigint) (type: int), UDFToInteger(cfloat) (type: int), UDFToInteger(cdouble) (type: int), UDFToInteger(cboolean1) (type: int), UDFToInteger(ctimestamp1) (type: int), UDFToInteger(cstring1) (type: int), UDFToInteger(substr(cstring1, 1, 1)) (type: int), UDFToByte(cfloat) (type: tinyint), UDFToShort(cfloat) (type: smallint), UDFToLong(cfloat) (type: bigint), UDFToDouble(ctinyint) (type: double), UDFToDouble(csmallint) (type: double), UDFToDouble(cint) (type: double),
  UDFToDouble(cbigint) (type: double), UDFToDouble(cfloat) (type: double), cdouble (type: double), UDFToDouble(cboolean1) (type: double), UDFToDouble(ctimestamp1) (type: double), UDFToDouble(cstring1) (type: double), UDFToDouble(substr(cstring1, 1, 1)) (type: double), UDFToFloat(cint) (type: float), UDFToFloat(cdouble) (type: float), CAST( ctinyint AS TIMESTAMP) (type: timestamp), CAST( csmallint AS TIMESTAMP) (type: timestamp), CAST( cint AS TIMESTAMP) (type: timestamp), CAST( cbigint AS TIMESTAMP) (type: timestamp), CAST( cfloat AS TIMESTAMP) (type: timestamp), CAST( cdouble AS TIMESTAMP) (type: timestamp), CAST( cboolean1 AS TIMESTAMP) (type: timestamp), CAST( (cbigint * 0L) AS TIMESTAMP) (type: timestamp), CAST( CAST( ctimestamp1 AS DATE) AS TIMESTAMP) (type: timestamp), ctimestamp1 (type: timestamp), CAST( cstring1 AS TIMESTAMP) (type: timestamp), CAST( substr(cstring1, 1, 1) AS TIMESTAMP) (type: timestamp), CAST( ctinyint AS STRING) (type: string), CAST( csmallint AS STRING) (t
 ype: string), CAST( cint AS STRING) (type: string), CAST( cbigint AS STRING) (type: string), CAST( cfloat AS STRING) (type: string), CAST( cdouble AS STRING) (type: string), CAST( cboolean1 AS STRING) (type: string), CAST( (cbigint * 0L) AS STRING) (type: string), CAST( ctimestamp1 AS STRING) (type: string), cstring1 (type: string), CAST( CAST( cstring1 AS CHAR(10)) AS STRING) (type: string), CAST( CAST( cstring1 AS varchar(10)) AS STRING) (type: string), UDFToFloat(UDFToInteger(cfloat)) (type: float), UDFToDouble((cint * 2)) (type: double), CAST( sin(cfloat) AS STRING) (type: string), (UDFToDouble(UDFToFloat(cint)) + UDFToDouble(cboolean1)) (type: double)
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33, _col34, _col35, _col36, _col37, _col38, _col39, _col40, _col41, _col42, _col43, _col44, _col45, _col46, _col47, _col48, _col49, _col50, _col51, _col52, _col53, _col54, _col55, _col56, _col57, _col58, _col59, _col60, _col61, _col62
                       Select Vectorization:
                           className: VectorSelectOperator
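
The plan-text updates in these golden files track the UDF change included in this merge (UDFToString.java drops 181 lines and GenericUDFToString.java is introduced in the file list below): casts to string are now printed as CAST( ... AS STRING) instead of UDFToString(...). As a small illustration, assuming a test table with an int column cint, as in the expressions above:

    EXPLAIN SELECT CAST(cint AS STRING) FROM alltypesorc;
    -- the Select Operator line now shows: CAST( cint AS STRING) (type: string)
    -- older golden files showed:          UDFToString(cint) (type: string)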

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out b/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out
index 8ee96d3..ba004e9 100644
--- a/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out
@@ -2235,9 +2235,9 @@ STAGE PLANS:
                     outputColumnNames: _col0
                     Statistics: Num rows: 2000 Data size: 368000 Basic stats: COMPLETE Column stats: COMPLETE
                     Reduce Output Operator
-                      key expressions: UDFToString((UDFToDouble(_col0) * 2.0D)) (type: string)
+                      key expressions: CAST( (UDFToDouble(_col0) * 2.0D) AS STRING) (type: string)
                       sort order: +
-                      Map-reduce partition columns: UDFToString((UDFToDouble(_col0) * 2.0D)) (type: string)
+                      Map-reduce partition columns: CAST( (UDFToDouble(_col0) * 2.0D) AS STRING) (type: string)
                       Statistics: Num rows: 2000 Data size: 368000 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: no inputs
@@ -2264,12 +2264,12 @@ STAGE PLANS:
                       outputColumnNames: _col0
                       Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
-                        key expressions: UDFToString(_col0) (type: string)
+                        key expressions: CAST( _col0 AS STRING) (type: string)
                         sort order: +
-                        Map-reduce partition columns: UDFToString(_col0) (type: string)
+                        Map-reduce partition columns: CAST( _col0 AS STRING) (type: string)
                         Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
-                        expressions: UDFToString(_col0) (type: string)
+                        expressions: CAST( _col0 AS STRING) (type: string)
                         outputColumnNames: _col0
                         Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
                         Group By Operator
@@ -2280,7 +2280,7 @@ STAGE PLANS:
                           Dynamic Partitioning Event Operator
                             Target column: hr (string)
                             Target Input: srcpart
-                            Partition key expr: UDFToString((UDFToDouble(hr) * 2.0D))
+                            Partition key expr: CAST( (UDFToDouble(hr) * 2.0D) AS STRING)
                             Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
                             Target Vertex: Map 1
             Execution mode: vectorized, llap
@@ -2301,8 +2301,8 @@ STAGE PLANS:
                 condition map:
                      Inner Join 0 to 1
                 keys:
-                  0 UDFToString((UDFToDouble(_col0) * 2.0D)) (type: string)
-                  1 UDFToString(_col0) (type: string)
+                  0 CAST( (UDFToDouble(_col0) * 2.0D) AS STRING) (type: string)
+                  1 CAST( _col0 AS STRING) (type: string)
                 Statistics: Num rows: 2200 Data size: 404800 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
                   aggregations: count()

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/multi_insert_mixed.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/multi_insert_mixed.q.out b/ql/src/test/results/clientpositive/multi_insert_mixed.q.out
index e9729a4..478a379 100644
--- a/ql/src/test/results/clientpositive/multi_insert_mixed.q.out
+++ b/ql/src/test/results/clientpositive/multi_insert_mixed.q.out
@@ -142,7 +142,7 @@ STAGE PLANS:
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
-          expressions: KEY.reducesinkkey0 (type: string), UDFToString(VALUE._col0) (type: string)
+          expressions: KEY.reducesinkkey0 (type: string), CAST( VALUE._col0 AS STRING) (type: string)
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
@@ -229,7 +229,7 @@ STAGE PLANS:
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
-          expressions: KEY.reducesinkkey0 (type: string), UDFToString(VALUE._col0) (type: string)
+          expressions: KEY.reducesinkkey0 (type: string), CAST( VALUE._col0 AS STRING) (type: string)
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           File Output Operator


[11/11] hive git commit: HIVE-19416 : merge master into branch (Sergey Shelukhin) 0724

Posted by se...@apache.org.
HIVE-19416 : merge master into branch (Sergey Shelukhin) 0724


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5e7a8b59
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5e7a8b59
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5e7a8b59

Branch: refs/heads/master-txnstats
Commit: 5e7a8b59cae36ccdd70c6540cf6ab3d2dfe1e735
Parents: 7bd688b 26f4d8e
Author: sergey <se...@apache.org>
Authored: Tue Jul 24 12:40:08 2018 -0700
Committer: sergey <se...@apache.org>
Committed: Tue Jul 24 12:40:08 2018 -0700

----------------------------------------------------------------------
 .../predicate/TestAccumuloRangeGenerator.java   |   7 +-
 .../test/resources/testconfiguration.properties |   1 +
 .../DTIColumnArithmeticDTIColumnNoConvert.txt   |   1 -
 .../DTIScalarArithmeticDTIColumnNoConvert.txt   |   1 -
 .../org/apache/hadoop/hive/ql/ErrorMsg.java     |   3 +
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |   7 +-
 .../apache/hadoop/hive/ql/exec/MoveTask.java    |   2 +-
 .../spark/status/impl/RemoteSparkJobStatus.java | 108 +++-
 .../ql/exec/vector/VectorizationContext.java    |  26 +-
 .../vector/expressions/CastDecimalToString.java |   2 +-
 .../hive/ql/index/IndexPredicateAnalyzer.java   |   2 +
 .../apache/hadoop/hive/ql/metadata/Hive.java    |  22 +-
 .../calcite/translator/RexNodeConverter.java    |   4 +-
 .../hive/ql/optimizer/physical/Vectorizer.java  |   3 +-
 .../hive/ql/parse/DDLSemanticAnalyzer.java      |   1 +
 .../hadoop/hive/ql/parse/TezCompiler.java       |  42 ++
 .../hadoop/hive/ql/plan/LoadTableDesc.java      |  10 +-
 .../apache/hadoop/hive/ql/plan/TableDesc.java   |   3 +
 .../apache/hadoop/hive/ql/udf/UDFToString.java  | 181 ------
 .../hive/ql/udf/generic/GenericUDFToString.java |  79 +++
 .../vector/expressions/TestVectorTypeCasts.java |   6 +-
 .../queries/clientpositive/external_insert.q    |  14 +
 .../clientpositive/murmur_hash_migration.q      |  61 ++
 .../clientpositive/autoColumnStats_6.q.out      |   2 +-
 .../clientpositive/bucket_map_join_spark1.q.out |   4 +-
 .../clientpositive/bucket_map_join_spark2.q.out |   4 +-
 .../clientpositive/bucket_map_join_spark3.q.out |   4 +-
 .../results/clientpositive/bucketmapjoin5.q.out |   4 +-
 .../clientpositive/bucketmapjoin_negative.q.out |   2 +-
 .../bucketmapjoin_negative2.q.out               |   2 +-
 .../bucketsortoptimize_insert_3.q.out           |   2 +-
 .../clientpositive/char_pad_convert.q.out       |   4 +-
 .../column_pruner_multiple_children.q.out       |   2 +-
 .../test/results/clientpositive/decimal_2.q.out |   4 +-
 .../clientpositive/external_insert.q.out        | 158 +++++
 .../test/results/clientpositive/groupby12.q.out |   2 +-
 .../test/results/clientpositive/groupby5.q.out  |   2 +-
 .../clientpositive/groupby5_noskew.q.out        |   2 +-
 .../results/clientpositive/groupby7_map.q.out   |   4 +-
 .../groupby7_map_multi_single_reducer.q.out     |   4 +-
 .../clientpositive/groupby7_map_skew.q.out      |   4 +-
 .../clientpositive/groupby7_noskew.q.out        |   4 +-
 .../groupby7_noskew_multi_single_reducer.q.out  |   4 +-
 .../test/results/clientpositive/groupby8.q.out  |   8 +-
 .../results/clientpositive/groupby8_map.q.out   |   4 +-
 .../clientpositive/groupby8_map_skew.q.out      |   4 +-
 .../clientpositive/groupby8_noskew.q.out        |   4 +-
 .../test/results/clientpositive/groupby9.q.out  |  20 +-
 .../clientpositive/groupby_cube_multi_gby.q.out |   2 +-
 .../clientpositive/groupby_position.q.out       |   8 +-
 .../clientpositive/groupby_sort_1_23.q.out      |   2 +-
 .../clientpositive/groupby_sort_skew_1_23.q.out |   4 +-
 .../infer_bucket_sort_dyn_part.q.out            |   2 +-
 .../infer_bucket_sort_grouping_operators.q.out  |   6 +-
 .../infer_bucket_sort_map_operators.q.out       |   6 +-
 .../infer_bucket_sort_num_buckets.q.out         |   2 +-
 .../results/clientpositive/input_part10.q.out   |   2 +-
 .../clientpositive/llap/bucketmapjoin1.q.out    |   4 +-
 .../clientpositive/llap/bucketmapjoin2.q.out    |   6 +-
 .../clientpositive/llap/bucketmapjoin3.q.out    |   4 +-
 .../clientpositive/llap/bucketmapjoin4.q.out    |   4 +-
 .../llap/default_constraint.q.out               |   4 +-
 .../llap/dynamic_partition_pruning.q.out        |  16 +-
 .../llap/insert_into_default_keyword.q.out      |  20 +-
 .../llap/materialized_view_rewrite_6.q.out      |   4 +-
 .../llap/multi_insert_lateral_view.q.out        |  32 +-
 .../llap/murmur_hash_migration.q.out            | 618 +++++++++++++++++++
 .../clientpositive/llap/orc_merge1.q.out        |   6 +-
 .../clientpositive/llap/orc_merge10.q.out       |   6 +-
 .../clientpositive/llap/orc_merge2.q.out        |   2 +-
 .../clientpositive/llap/orc_merge_diff_fs.q.out |   6 +-
 .../clientpositive/llap/rcfile_merge2.q.out     |   2 +-
 .../llap/schema_evol_text_nonvec_part.q.out     |   4 +-
 ...hema_evol_text_nonvec_part_all_complex.q.out |  16 +-
 ...l_text_nonvec_part_all_complex_llap_io.q.out |  16 +-
 .../schema_evol_text_nonvec_part_llap_io.q.out  |   4 +-
 .../llap/schema_evol_text_vec_part.q.out        |   4 +-
 .../schema_evol_text_vec_part_all_complex.q.out |  16 +-
 .../llap/schema_evol_text_vecrow_part.q.out     |   4 +-
 ...hema_evol_text_vecrow_part_all_complex.q.out |  16 +-
 .../results/clientpositive/llap/stats11.q.out   |   4 +-
 .../clientpositive/llap/subquery_multi.q.out    |   6 +-
 .../llap/tez_union_multiinsert.q.out            |  30 +-
 .../results/clientpositive/llap/union6.q.out    |   2 +-
 .../clientpositive/llap/unionDistinct_1.q.out   |  18 +-
 .../clientpositive/llap/unionDistinct_3.q.out   |  10 +-
 .../llap/vector_case_when_1.q.out               |  10 +-
 .../llap/vector_char_mapjoin1.q.out             |   6 +-
 .../clientpositive/llap/vector_decimal_1.q.out  |   2 +-
 .../clientpositive/llap/vector_decimal_2.q.out  |   8 +-
 .../llap/vector_decimal_expressions.q.out       |  28 +-
 .../llap/vector_string_concat.q.out             |   2 +-
 .../clientpositive/llap/vector_udf1.q.out       |   6 +-
 .../llap/vector_varchar_mapjoin1.q.out          |   6 +-
 .../clientpositive/llap/vectorized_casts.q.out  |   2 +-
 .../vectorized_dynamic_partition_pruning.q.out  |  16 +-
 .../clientpositive/multi_insert_mixed.q.out     |   4 +-
 .../results/clientpositive/orc_merge1.q.out     |   6 +-
 .../results/clientpositive/orc_merge10.q.out    |   6 +-
 .../results/clientpositive/orc_merge2.q.out     |   2 +-
 .../clientpositive/orc_merge_diff_fs.q.out      |   6 +-
 .../clientpositive/perf/spark/query36.q.out     |   6 +-
 .../clientpositive/perf/spark/query70.q.out     |   6 +-
 .../clientpositive/perf/spark/query86.q.out     |   6 +-
 .../clientpositive/perf/tez/query36.q.out       |   4 +-
 .../clientpositive/perf/tez/query70.q.out       |   4 +-
 .../clientpositive/perf/tez/query86.q.out       |   4 +-
 .../results/clientpositive/show_functions.q.out |   1 -
 .../results/clientpositive/smb_mapjoin_20.q.out |  12 +-
 .../spark/bucket_map_join_spark1.q.out          |   4 +-
 .../spark/bucket_map_join_spark2.q.out          |   4 +-
 .../spark/bucket_map_join_spark3.q.out          |   4 +-
 .../clientpositive/spark/bucketmapjoin1.q.out   |   4 +-
 .../clientpositive/spark/bucketmapjoin2.q.out   |   6 +-
 .../clientpositive/spark/bucketmapjoin3.q.out   |   4 +-
 .../clientpositive/spark/bucketmapjoin4.q.out   |   4 +-
 .../clientpositive/spark/bucketmapjoin5.q.out   |   4 +-
 .../spark/bucketmapjoin_negative.q.out          |   2 +-
 .../spark/bucketmapjoin_negative2.q.out         |   2 +-
 .../spark/dynamic_rdd_cache.q.out               |   8 +-
 .../results/clientpositive/spark/groupby5.q.out |   2 +-
 .../clientpositive/spark/groupby5_noskew.q.out  |   2 +-
 .../clientpositive/spark/groupby7_map.q.out     |   4 +-
 .../groupby7_map_multi_single_reducer.q.out     |   4 +-
 .../spark/groupby7_map_skew.q.out               |   4 +-
 .../clientpositive/spark/groupby7_noskew.q.out  |   4 +-
 .../groupby7_noskew_multi_single_reducer.q.out  |   4 +-
 .../results/clientpositive/spark/groupby8.q.out |   8 +-
 .../clientpositive/spark/groupby8_map.q.out     |   4 +-
 .../spark/groupby8_map_skew.q.out               |   4 +-
 .../clientpositive/spark/groupby8_noskew.q.out  |   4 +-
 .../results/clientpositive/spark/groupby9.q.out |  20 +-
 .../clientpositive/spark/groupby_position.q.out |   8 +-
 .../spark/groupby_sort_1_23.q.out               |   2 +-
 .../spark/groupby_sort_skew_1_23.q.out          |   4 +-
 .../spark/infer_bucket_sort_map_operators.q.out |   6 +-
 .../spark/multi_insert_lateral_view.q.out       |  32 +-
 .../spark/multi_insert_mixed.q.out              |   4 +-
 .../clientpositive/spark/smb_mapjoin_20.q.out   |  12 +-
 .../spark/spark_dynamic_partition_pruning.q.out |  44 +-
 ...k_vectorized_dynamic_partition_pruning.q.out |  44 +-
 .../results/clientpositive/spark/stats1.q.out   |   2 +-
 .../clientpositive/spark/subquery_multi.q.out   |   6 +-
 .../results/clientpositive/spark/union17.q.out  |   8 +-
 .../results/clientpositive/spark/union18.q.out  |   2 +-
 .../results/clientpositive/spark/union19.q.out  |   4 +-
 .../results/clientpositive/spark/union20.q.out  |   4 +-
 .../results/clientpositive/spark/union32.q.out  |   4 +-
 .../results/clientpositive/spark/union33.q.out  |   4 +-
 .../results/clientpositive/spark/union6.q.out   |   2 +-
 .../clientpositive/spark/union_remove_19.q.out  |   4 +-
 .../spark/vector_string_concat.q.out            |   2 +-
 ql/src/test/results/clientpositive/stats1.q.out |   2 +-
 .../results/clientpositive/tablevalues.q.out    |   2 +-
 ql/src/test/results/clientpositive/udf3.q.out   |   2 +-
 .../results/clientpositive/udf_string.q.out     |   9 +-
 .../test/results/clientpositive/union17.q.out   |   6 +-
 .../test/results/clientpositive/union18.q.out   |   2 +-
 .../test/results/clientpositive/union19.q.out   |   4 +-
 .../test/results/clientpositive/union20.q.out   |   4 +-
 .../test/results/clientpositive/union32.q.out   |   4 +-
 .../test/results/clientpositive/union33.q.out   |   4 +-
 ql/src/test/results/clientpositive/union6.q.out |   2 +-
 .../clientpositive/union_remove_19.q.out        |   4 +-
 .../clientpositive/vector_case_when_1.q.out     |  10 +-
 .../clientpositive/vector_char_mapjoin1.q.out   |   4 +-
 .../clientpositive/vector_decimal_1.q.out       |   2 +-
 .../vector_decimal_expressions.q.out            |  28 +-
 .../clientpositive/vector_string_concat.q.out   |   2 +-
 .../vector_varchar_mapjoin1.q.out               |   4 +-
 .../clientpositive/vectorized_casts.q.out       |   2 +-
 .../PrimitiveObjectInspectorConverter.java      |   3 +-
 .../PrimitiveObjectInspectorUtils.java          |   3 +-
 .../TestObjectInspectorConverters.java          |  46 +-
 .../TestPrimitiveObjectInspectorUtils.java      |  14 +
 175 files changed, 1649 insertions(+), 737 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/5e7a8b59/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/5e7a8b59/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/5e7a8b59/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/5e7a8b59/ql/src/test/results/clientpositive/llap/insert_into_default_keyword.q.out
----------------------------------------------------------------------


[05/11] hive git commit: HIVE-20082: HiveDecimal to string conversion doesn't format the decimal correctly (Jason Dere, reviewed by Ashutosh Chauhan)

Posted by se...@apache.org.
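
The golden-file updates below come from the same change that removed UDFToString.java and added GenericUDFToString.java (see the file list above): casts to string are now printed in explain plans as CAST( ... AS STRING) instead of UDFToString(...). A minimal sketch of a query whose EXPLAIN output changes in this way; the table and column names are hypothetical and are not part of this commit:

    -- hypothetical table, for illustration only: t(d DECIMAL(10,2))
    EXPLAIN SELECT CAST(d AS STRING) FROM t;
    -- before this change the Select Operator printed:
    --   expressions: UDFToString(d) (type: string)
    -- after it prints:
    --   expressions: CAST( d AS STRING) (type: string)

The diffs that follow are the corresponding test-output updates across the Spark and LLAP clientpositive results.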
http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/multi_insert_lateral_view.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/multi_insert_lateral_view.q.out b/ql/src/test/results/clientpositive/spark/multi_insert_lateral_view.q.out
index 22fe91c..917c40c 100644
--- a/ql/src/test/results/clientpositive/spark/multi_insert_lateral_view.q.out
+++ b/ql/src/test/results/clientpositive/spark/multi_insert_lateral_view.q.out
@@ -71,7 +71,7 @@ STAGE PLANS:
                         outputColumnNames: _col0, _col5
                         Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col0 (type: string), UDFToString(_col5) (type: string)
+                          expressions: _col0 (type: string), CAST( _col5 AS STRING) (type: string)
                           outputColumnNames: _col0, _col1
                           Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
                           File Output Operator
@@ -93,7 +93,7 @@ STAGE PLANS:
                           outputColumnNames: _col0, _col5
                           Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
                           Select Operator
-                            expressions: _col0 (type: string), UDFToString(_col5) (type: string)
+                            expressions: _col0 (type: string), CAST( _col5 AS STRING) (type: string)
                             outputColumnNames: _col0, _col1
                             Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
                             File Output Operator
@@ -114,7 +114,7 @@ STAGE PLANS:
                         outputColumnNames: _col0, _col5
                         Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col0 (type: string), UDFToString(_col5) (type: string)
+                          expressions: _col0 (type: string), CAST( _col5 AS STRING) (type: string)
                           outputColumnNames: _col0, _col1
                           Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
                           File Output Operator
@@ -136,7 +136,7 @@ STAGE PLANS:
                           outputColumnNames: _col0, _col5
                           Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
                           Select Operator
-                            expressions: _col0 (type: string), UDFToString(_col5) (type: string)
+                            expressions: _col0 (type: string), CAST( _col5 AS STRING) (type: string)
                             outputColumnNames: _col0, _col1
                             Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
                             File Output Operator
@@ -381,7 +381,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -402,7 +402,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -598,7 +598,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -623,7 +623,7 @@ STAGE PLANS:
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                      expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
@@ -644,7 +644,7 @@ STAGE PLANS:
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                      expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
@@ -917,7 +917,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), UDFToString(_col1) (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -937,7 +937,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), UDFToString(_col1) (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -957,7 +957,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 5 Data size: 52 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 5 Data size: 52 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -1266,7 +1266,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -1286,7 +1286,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -1311,7 +1311,7 @@ STAGE PLANS:
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                      expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
@@ -1332,7 +1332,7 @@ STAGE PLANS:
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                      expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/multi_insert_mixed.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/multi_insert_mixed.q.out b/ql/src/test/results/clientpositive/spark/multi_insert_mixed.q.out
index 0dde265..79de449 100644
--- a/ql/src/test/results/clientpositive/spark/multi_insert_mixed.q.out
+++ b/ql/src/test/results/clientpositive/spark/multi_insert_mixed.q.out
@@ -130,7 +130,7 @@ STAGE PLANS:
             Execution mode: vectorized
             Reduce Operator Tree:
               Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), UDFToString(VALUE._col0) (type: string)
+                expressions: KEY.reducesinkkey0 (type: string), CAST( VALUE._col0 AS STRING) (type: string)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
@@ -159,7 +159,7 @@ STAGE PLANS:
             Execution mode: vectorized
             Reduce Operator Tree:
               Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), UDFToString(VALUE._col0) (type: string)
+                expressions: KEY.reducesinkkey0 (type: string), CAST( VALUE._col0 AS STRING) (type: string)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/smb_mapjoin_20.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/smb_mapjoin_20.q.out b/ql/src/test/results/clientpositive/spark/smb_mapjoin_20.q.out
index fd0f1c0..e6481ae 100644
--- a/ql/src/test/results/clientpositive/spark/smb_mapjoin_20.q.out
+++ b/ql/src/test/results/clientpositive/spark/smb_mapjoin_20.q.out
@@ -61,9 +61,9 @@ STAGE PLANS:
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
-                      key expressions: UDFToString(_col0) (type: string)
+                      key expressions: CAST( _col0 AS STRING) (type: string)
                       sort order: +
-                      Map-reduce partition columns: UDFToString(_col0) (type: string)
+                      Map-reduce partition columns: CAST( _col0 AS STRING) (type: string)
                       Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: int), _col1 (type: string)
             Execution mode: vectorized
@@ -71,7 +71,7 @@ STAGE PLANS:
             Execution mode: vectorized
             Reduce Operator Tree:
               Select Operator
-                expressions: UDFToString(VALUE._col0) (type: string), VALUE._col1 (type: string), VALUE._col1 (type: string)
+                expressions: CAST( VALUE._col0 AS STRING) (type: string), VALUE._col1 (type: string), VALUE._col1 (type: string)
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
@@ -1358,9 +1358,9 @@ STAGE PLANS:
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
-                      key expressions: UDFToString(_col0) (type: string)
+                      key expressions: CAST( _col0 AS STRING) (type: string)
                       sort order: +
-                      Map-reduce partition columns: UDFToString(_col0) (type: string)
+                      Map-reduce partition columns: CAST( _col0 AS STRING) (type: string)
                       Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: int), _col1 (type: string)
             Execution mode: vectorized
@@ -1368,7 +1368,7 @@ STAGE PLANS:
             Execution mode: vectorized
             Reduce Operator Tree:
               Select Operator
-                expressions: UDFToString(VALUE._col0) (type: string), VALUE._col1 (type: string), VALUE._col1 (type: string)
+                expressions: CAST( VALUE._col0 AS STRING) (type: string), VALUE._col1 (type: string), VALUE._col1 (type: string)
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/spark_dynamic_partition_pruning.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/spark_dynamic_partition_pruning.q.out b/ql/src/test/results/clientpositive/spark/spark_dynamic_partition_pruning.q.out
index cecee57..2420252 100644
--- a/ql/src/test/results/clientpositive/spark/spark_dynamic_partition_pruning.q.out
+++ b/ql/src/test/results/clientpositive/spark/spark_dynamic_partition_pruning.q.out
@@ -706,17 +706,17 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: srcpart_date_n4
-                  filterExpr: ((date = '2008-04-08') and abs(((- UDFToLong(concat(UDFToString(day(CAST( ds AS DATE))), '0'))) + 10)) is not null) (type: boolean)
+                  filterExpr: ((date = '2008-04-08') and abs(((- UDFToLong(concat(CAST( day(CAST( ds AS DATE)) AS STRING), '0'))) + 10)) is not null) (type: boolean)
                   Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: ((date = '2008-04-08') and abs(((- UDFToLong(concat(UDFToString(day(CAST( ds AS DATE))), '0'))) + 10)) is not null) (type: boolean)
+                    predicate: ((date = '2008-04-08') and abs(((- UDFToLong(concat(CAST( day(CAST( ds AS DATE)) AS STRING), '0'))) + 10)) is not null) (type: boolean)
                     Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
                       outputColumnNames: _col0
                       Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
-                        expressions: abs(((- UDFToLong(concat(UDFToString(day(CAST( _col0 AS DATE))), '0'))) + 10)) (type: bigint)
+                        expressions: abs(((- UDFToLong(concat(CAST( day(CAST( _col0 AS DATE)) AS STRING), '0'))) + 10)) (type: bigint)
                         outputColumnNames: _col0
                         Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                         Group By Operator
@@ -725,7 +725,7 @@ STAGE PLANS:
                           outputColumnNames: _col0
                           Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                           Spark Partition Pruning Sink Operator
-                            Target Columns: [Map 1 -> [ds:string (abs(((- UDFToLong(concat(UDFToString(day(CAST( ds AS DATE))), '0'))) + 10)))]]
+                            Target Columns: [Map 1 -> [ds:string (abs(((- UDFToLong(concat(CAST( day(CAST( ds AS DATE)) AS STRING), '0'))) + 10)))]]
                             Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
 
   Stage: Stage-1
@@ -739,37 +739,37 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: srcpart
-                  filterExpr: abs(((- UDFToLong(concat(UDFToString(day(CAST( ds AS DATE))), '0'))) + 10)) is not null (type: boolean)
+                  filterExpr: abs(((- UDFToLong(concat(CAST( day(CAST( ds AS DATE)) AS STRING), '0'))) + 10)) is not null (type: boolean)
                   Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: abs(((- UDFToLong(concat(UDFToString(day(CAST( ds AS DATE))), '0'))) + 10)) is not null (type: boolean)
+                    predicate: abs(((- UDFToLong(concat(CAST( day(CAST( ds AS DATE)) AS STRING), '0'))) + 10)) is not null (type: boolean)
                     Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
                       outputColumnNames: _col0
                       Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
-                        key expressions: abs(((- UDFToLong(concat(UDFToString(day(CAST( _col0 AS DATE))), '0'))) + 10)) (type: bigint)
+                        key expressions: abs(((- UDFToLong(concat(CAST( day(CAST( _col0 AS DATE)) AS STRING), '0'))) + 10)) (type: bigint)
                         sort order: +
-                        Map-reduce partition columns: abs(((- UDFToLong(concat(UDFToString(day(CAST( _col0 AS DATE))), '0'))) + 10)) (type: bigint)
+                        Map-reduce partition columns: abs(((- UDFToLong(concat(CAST( day(CAST( _col0 AS DATE)) AS STRING), '0'))) + 10)) (type: bigint)
                         Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
         Map 4 
             Map Operator Tree:
                 TableScan
                   alias: srcpart_date_n4
-                  filterExpr: ((date = '2008-04-08') and abs(((- UDFToLong(concat(UDFToString(day(CAST( ds AS DATE))), '0'))) + 10)) is not null) (type: boolean)
+                  filterExpr: ((date = '2008-04-08') and abs(((- UDFToLong(concat(CAST( day(CAST( ds AS DATE)) AS STRING), '0'))) + 10)) is not null) (type: boolean)
                   Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: ((date = '2008-04-08') and abs(((- UDFToLong(concat(UDFToString(day(CAST( ds AS DATE))), '0'))) + 10)) is not null) (type: boolean)
+                    predicate: ((date = '2008-04-08') and abs(((- UDFToLong(concat(CAST( day(CAST( ds AS DATE)) AS STRING), '0'))) + 10)) is not null) (type: boolean)
                     Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
                       outputColumnNames: _col0
                       Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
-                        key expressions: abs(((- UDFToLong(concat(UDFToString(day(CAST( _col0 AS DATE))), '0'))) + 10)) (type: bigint)
+                        key expressions: abs(((- UDFToLong(concat(CAST( day(CAST( _col0 AS DATE)) AS STRING), '0'))) + 10)) (type: bigint)
                         sort order: +
-                        Map-reduce partition columns: abs(((- UDFToLong(concat(UDFToString(day(CAST( _col0 AS DATE))), '0'))) + 10)) (type: bigint)
+                        Map-reduce partition columns: abs(((- UDFToLong(concat(CAST( day(CAST( _col0 AS DATE)) AS STRING), '0'))) + 10)) (type: bigint)
                         Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
         Reducer 2 
             Reduce Operator Tree:
@@ -777,8 +777,8 @@ STAGE PLANS:
                 condition map:
                      Inner Join 0 to 1
                 keys:
-                  0 abs(((- UDFToLong(concat(UDFToString(day(CAST( _col0 AS DATE))), '0'))) + 10)) (type: bigint)
-                  1 abs(((- UDFToLong(concat(UDFToString(day(CAST( _col0 AS DATE))), '0'))) + 10)) (type: bigint)
+                  0 abs(((- UDFToLong(concat(CAST( day(CAST( _col0 AS DATE)) AS STRING), '0'))) + 10)) (type: bigint)
+                  1 abs(((- UDFToLong(concat(CAST( day(CAST( _col0 AS DATE)) AS STRING), '0'))) + 10)) (type: bigint)
                 Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
                   aggregations: count()
@@ -2372,7 +2372,7 @@ STAGE PLANS:
                       outputColumnNames: _col0
                       Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
-                        expressions: UDFToString(_col0) (type: string)
+                        expressions: CAST( _col0 AS STRING) (type: string)
                         outputColumnNames: _col0
                         Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
                         Group By Operator
@@ -2381,7 +2381,7 @@ STAGE PLANS:
                           outputColumnNames: _col0
                           Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
                           Spark Partition Pruning Sink Operator
-                            Target Columns: [Map 1 -> [hr:string (UDFToString((UDFToDouble(hr) * 2.0D)))]]
+                            Target Columns: [Map 1 -> [hr:string (CAST( (UDFToDouble(hr) * 2.0D) AS STRING))]]
                             Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
 
   Stage: Stage-1
@@ -2402,9 +2402,9 @@ STAGE PLANS:
                     outputColumnNames: _col0
                     Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
-                      key expressions: UDFToString((UDFToDouble(_col0) * 2.0D)) (type: string)
+                      key expressions: CAST( (UDFToDouble(_col0) * 2.0D) AS STRING) (type: string)
                       sort order: +
-                      Map-reduce partition columns: UDFToString((UDFToDouble(_col0) * 2.0D)) (type: string)
+                      Map-reduce partition columns: CAST( (UDFToDouble(_col0) * 2.0D) AS STRING) (type: string)
                       Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
         Map 4 
             Map Operator Tree:
@@ -2420,9 +2420,9 @@ STAGE PLANS:
                       outputColumnNames: _col0
                       Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
-                        key expressions: UDFToString(_col0) (type: string)
+                        key expressions: CAST( _col0 AS STRING) (type: string)
                         sort order: +
-                        Map-reduce partition columns: UDFToString(_col0) (type: string)
+                        Map-reduce partition columns: CAST( _col0 AS STRING) (type: string)
                         Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
         Reducer 2 
             Reduce Operator Tree:
@@ -2430,8 +2430,8 @@ STAGE PLANS:
                 condition map:
                      Inner Join 0 to 1
                 keys:
-                  0 UDFToString((UDFToDouble(_col0) * 2.0D)) (type: string)
-                  1 UDFToString(_col0) (type: string)
+                  0 CAST( (UDFToDouble(_col0) * 2.0D) AS STRING) (type: string)
+                  1 CAST( _col0 AS STRING) (type: string)
                 Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
                   aggregations: count()

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/spark_vectorized_dynamic_partition_pruning.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/spark_vectorized_dynamic_partition_pruning.q.out b/ql/src/test/results/clientpositive/spark/spark_vectorized_dynamic_partition_pruning.q.out
index c41dba9..e988760 100644
--- a/ql/src/test/results/clientpositive/spark/spark_vectorized_dynamic_partition_pruning.q.out
+++ b/ql/src/test/results/clientpositive/spark/spark_vectorized_dynamic_partition_pruning.q.out
@@ -1244,7 +1244,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: srcpart_date_n0
-                  filterExpr: ((date = '2008-04-08') and abs(((- UDFToLong(concat(UDFToString(day(CAST( ds AS DATE))), '0'))) + 10)) is not null) (type: boolean)
+                  filterExpr: ((date = '2008-04-08') and abs(((- UDFToLong(concat(CAST( day(CAST( ds AS DATE)) AS STRING), '0'))) + 10)) is not null) (type: boolean)
                   Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
@@ -1254,7 +1254,7 @@ STAGE PLANS:
                         className: VectorFilterOperator
                         native: true
                         predicateExpression: FilterExprAndExpr(children: FilterStringGroupColEqualStringScalar(col 1:string, val 2008-04-08), SelectColumnIsNotNull(col 4:bigint)(children: FuncAbsLongToLong(col 3:bigint)(children: LongColAddLongScalar(col 4:bigint, val 10)(children: LongColUnaryMinus(col 3:bigint)(children: CastStringToLong(col 6:string)(children: StringGroupColConcatStringScalar(col 5:string, val 0)(children: CastLongToString(col 4:int)(children: VectorUDFDayOfMonthDate(col 3, field DAY_OF_MONTH)(children: CastStringToDate(col 0:string) -> 3:date) -> 4:int) -> 5:string) -> 6:string) -> 3:bigint) -> 4:bigint) -> 3:bigint) -> 4:bigint))
-                    predicate: ((date = '2008-04-08') and abs(((- UDFToLong(concat(UDFToString(day(CAST( ds AS DATE))), '0'))) + 10)) is not null) (type: boolean)
+                    predicate: ((date = '2008-04-08') and abs(((- UDFToLong(concat(CAST( day(CAST( ds AS DATE)) AS STRING), '0'))) + 10)) is not null) (type: boolean)
                     Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
@@ -1265,7 +1265,7 @@ STAGE PLANS:
                           projectedOutputColumnNums: [0]
                       Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
-                        expressions: abs(((- UDFToLong(concat(UDFToString(day(CAST( _col0 AS DATE))), '0'))) + 10)) (type: bigint)
+                        expressions: abs(((- UDFToLong(concat(CAST( day(CAST( _col0 AS DATE)) AS STRING), '0'))) + 10)) (type: bigint)
                         outputColumnNames: _col0
                         Select Vectorization:
                             className: VectorSelectOperator
@@ -1286,7 +1286,7 @@ STAGE PLANS:
                           outputColumnNames: _col0
                           Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
                           Spark Partition Pruning Sink Operator
-                            Target Columns: [Map 1 -> [ds:string (abs(((- UDFToLong(concat(UDFToString(day(CAST( ds AS DATE))), '0'))) + 10)))]]
+                            Target Columns: [Map 1 -> [ds:string (abs(((- UDFToLong(concat(CAST( day(CAST( ds AS DATE)) AS STRING), '0'))) + 10)))]]
                             Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
             Map Vectorization:
@@ -1316,7 +1316,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: srcpart
-                  filterExpr: abs(((- UDFToLong(concat(UDFToString(day(CAST( ds AS DATE))), '0'))) + 10)) is not null (type: boolean)
+                  filterExpr: abs(((- UDFToLong(concat(CAST( day(CAST( ds AS DATE)) AS STRING), '0'))) + 10)) is not null (type: boolean)
                   Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
@@ -1326,7 +1326,7 @@ STAGE PLANS:
                         className: VectorFilterOperator
                         native: true
                         predicateExpression: SelectColumnIsNotNull(col 6:bigint)(children: FuncAbsLongToLong(col 5:bigint)(children: LongColAddLongScalar(col 6:bigint, val 10)(children: LongColUnaryMinus(col 5:bigint)(children: CastStringToLong(col 8:string)(children: StringGroupColConcatStringScalar(col 7:string, val 0)(children: CastLongToString(col 6:int)(children: VectorUDFDayOfMonthDate(col 5, field DAY_OF_MONTH)(children: CastStringToDate(col 2:string) -> 5:date) -> 6:int) -> 7:string) -> 8:string) -> 5:bigint) -> 6:bigint) -> 5:bigint) -> 6:bigint)
-                    predicate: abs(((- UDFToLong(concat(UDFToString(day(CAST( ds AS DATE))), '0'))) + 10)) is not null (type: boolean)
+                    predicate: abs(((- UDFToLong(concat(CAST( day(CAST( ds AS DATE)) AS STRING), '0'))) + 10)) is not null (type: boolean)
                     Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
@@ -1337,9 +1337,9 @@ STAGE PLANS:
                           projectedOutputColumnNums: [2]
                       Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
-                        key expressions: abs(((- UDFToLong(concat(UDFToString(day(CAST( _col0 AS DATE))), '0'))) + 10)) (type: bigint)
+                        key expressions: abs(((- UDFToLong(concat(CAST( day(CAST( _col0 AS DATE)) AS STRING), '0'))) + 10)) (type: bigint)
                         sort order: +
-                        Map-reduce partition columns: abs(((- UDFToLong(concat(UDFToString(day(CAST( _col0 AS DATE))), '0'))) + 10)) (type: bigint)
+                        Map-reduce partition columns: abs(((- UDFToLong(concat(CAST( day(CAST( _col0 AS DATE)) AS STRING), '0'))) + 10)) (type: bigint)
                         Reduce Sink Vectorization:
                             className: VectorReduceSinkLongOperator
                             keyColumnNums: [6]
@@ -1369,7 +1369,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: srcpart_date_n0
-                  filterExpr: ((date = '2008-04-08') and abs(((- UDFToLong(concat(UDFToString(day(CAST( ds AS DATE))), '0'))) + 10)) is not null) (type: boolean)
+                  filterExpr: ((date = '2008-04-08') and abs(((- UDFToLong(concat(CAST( day(CAST( ds AS DATE)) AS STRING), '0'))) + 10)) is not null) (type: boolean)
                   Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
@@ -1379,7 +1379,7 @@ STAGE PLANS:
                         className: VectorFilterOperator
                         native: true
                         predicateExpression: FilterExprAndExpr(children: FilterStringGroupColEqualStringScalar(col 1:string, val 2008-04-08), SelectColumnIsNotNull(col 4:bigint)(children: FuncAbsLongToLong(col 3:bigint)(children: LongColAddLongScalar(col 4:bigint, val 10)(children: LongColUnaryMinus(col 3:bigint)(children: CastStringToLong(col 6:string)(children: StringGroupColConcatStringScalar(col 5:string, val 0)(children: CastLongToString(col 4:int)(children: VectorUDFDayOfMonthDate(col 3, field DAY_OF_MONTH)(children: CastStringToDate(col 0:string) -> 3:date) -> 4:int) -> 5:string) -> 6:string) -> 3:bigint) -> 4:bigint) -> 3:bigint) -> 4:bigint))
-                    predicate: ((date = '2008-04-08') and abs(((- UDFToLong(concat(UDFToString(day(CAST( ds AS DATE))), '0'))) + 10)) is not null) (type: boolean)
+                    predicate: ((date = '2008-04-08') and abs(((- UDFToLong(concat(CAST( day(CAST( ds AS DATE)) AS STRING), '0'))) + 10)) is not null) (type: boolean)
                     Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
@@ -1390,9 +1390,9 @@ STAGE PLANS:
                           projectedOutputColumnNums: [0]
                       Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
-                        key expressions: abs(((- UDFToLong(concat(UDFToString(day(CAST( _col0 AS DATE))), '0'))) + 10)) (type: bigint)
+                        key expressions: abs(((- UDFToLong(concat(CAST( day(CAST( _col0 AS DATE)) AS STRING), '0'))) + 10)) (type: bigint)
                         sort order: +
-                        Map-reduce partition columns: abs(((- UDFToLong(concat(UDFToString(day(CAST( _col0 AS DATE))), '0'))) + 10)) (type: bigint)
+                        Map-reduce partition columns: abs(((- UDFToLong(concat(CAST( day(CAST( _col0 AS DATE)) AS STRING), '0'))) + 10)) (type: bigint)
                         Reduce Sink Vectorization:
                             className: VectorReduceSinkLongOperator
                             keyColumnNums: [4]
@@ -1428,8 +1428,8 @@ STAGE PLANS:
                 condition map:
                      Inner Join 0 to 1
                 keys:
-                  0 abs(((- UDFToLong(concat(UDFToString(day(CAST( _col0 AS DATE))), '0'))) + 10)) (type: bigint)
-                  1 abs(((- UDFToLong(concat(UDFToString(day(CAST( _col0 AS DATE))), '0'))) + 10)) (type: bigint)
+                  0 abs(((- UDFToLong(concat(CAST( day(CAST( _col0 AS DATE)) AS STRING), '0'))) + 10)) (type: bigint)
+                  1 abs(((- UDFToLong(concat(CAST( day(CAST( _col0 AS DATE)) AS STRING), '0'))) + 10)) (type: bigint)
                 Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
                   aggregations: count()
@@ -4481,7 +4481,7 @@ STAGE PLANS:
                           projectedOutputColumnNums: [0]
                       Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
-                        expressions: UDFToString(_col0) (type: string)
+                        expressions: CAST( _col0 AS STRING) (type: string)
                         outputColumnNames: _col0
                         Select Vectorization:
                             className: VectorSelectOperator
@@ -4502,7 +4502,7 @@ STAGE PLANS:
                           outputColumnNames: _col0
                           Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
                           Spark Partition Pruning Sink Operator
-                            Target Columns: [Map 1 -> [hr:string (UDFToString((UDFToDouble(hr) * 2.0D)))]]
+                            Target Columns: [Map 1 -> [hr:string (CAST( (UDFToDouble(hr) * 2.0D) AS STRING))]]
                             Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
             Map Vectorization:
@@ -4546,9 +4546,9 @@ STAGE PLANS:
                         projectedOutputColumnNums: [3]
                     Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
-                      key expressions: UDFToString((UDFToDouble(_col0) * 2.0D)) (type: string)
+                      key expressions: CAST( (UDFToDouble(_col0) * 2.0D) AS STRING) (type: string)
                       sort order: +
-                      Map-reduce partition columns: UDFToString((UDFToDouble(_col0) * 2.0D)) (type: string)
+                      Map-reduce partition columns: CAST( (UDFToDouble(_col0) * 2.0D) AS STRING) (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
                           keyColumnNums: [7]
@@ -4599,9 +4599,9 @@ STAGE PLANS:
                           projectedOutputColumnNums: [0]
                       Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
-                        key expressions: UDFToString(_col0) (type: string)
+                        key expressions: CAST( _col0 AS STRING) (type: string)
                         sort order: +
-                        Map-reduce partition columns: UDFToString(_col0) (type: string)
+                        Map-reduce partition columns: CAST( _col0 AS STRING) (type: string)
                         Reduce Sink Vectorization:
                             className: VectorReduceSinkStringOperator
                             keyColumnNums: [4]
@@ -4637,8 +4637,8 @@ STAGE PLANS:
                 condition map:
                      Inner Join 0 to 1
                 keys:
-                  0 UDFToString((UDFToDouble(_col0) * 2.0D)) (type: string)
-                  1 UDFToString(_col0) (type: string)
+                  0 CAST( (UDFToDouble(_col0) * 2.0D) AS STRING) (type: string)
+                  1 CAST( _col0 AS STRING) (type: string)
                 Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
                   aggregations: count()

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/stats1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/stats1.q.out b/ql/src/test/results/clientpositive/spark/stats1.q.out
index b755b4c..8e1043c 100644
--- a/ql/src/test/results/clientpositive/spark/stats1.q.out
+++ b/ql/src/test/results/clientpositive/spark/stats1.q.out
@@ -76,7 +76,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/subquery_multi.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/subquery_multi.q.out b/ql/src/test/results/clientpositive/spark/subquery_multi.q.out
index f90b353..80a48b7 100644
--- a/ql/src/test/results/clientpositive/spark/subquery_multi.q.out
+++ b/ql/src/test/results/clientpositive/spark/subquery_multi.q.out
@@ -1037,7 +1037,7 @@ STAGE PLANS:
                      Left Outer Join 0 to 1
                 keys:
                   0 _col1 (type: string)
-                  1 UDFToString(_col0) (type: string)
+                  1 CAST( _col0 AS STRING) (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col12
                 Statistics: Num rows: 1 Data size: 35834 Basic stats: PARTIAL Column stats: NONE
                 Filter Operator
@@ -1095,9 +1095,9 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   Reduce Output Operator
-                    key expressions: UDFToString(_col0) (type: string)
+                    key expressions: CAST( _col0 AS STRING) (type: string)
                     sort order: +
-                    Map-reduce partition columns: UDFToString(_col0) (type: string)
+                    Map-reduce partition columns: CAST( _col0 AS STRING) (type: string)
                     Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                     value expressions: _col1 (type: boolean)
 

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/union17.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/union17.q.out b/ql/src/test/results/clientpositive/spark/union17.q.out
index 93086a0..6b42bfc 100644
--- a/ql/src/test/results/clientpositive/spark/union17.q.out
+++ b/ql/src/test/results/clientpositive/spark/union17.q.out
@@ -111,7 +111,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: PARTIAL
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: PARTIAL
                   File Output Operator
@@ -131,7 +131,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 1 Data size: 280 Basic stats: COMPLETE Column stats: PARTIAL
                 Select Operator
-                  expressions: _col0 (type: string), _col1 (type: string), UDFToString(_col2) (type: string)
+                  expressions: _col0 (type: string), _col1 (type: string), CAST( _col2 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 1 Data size: 456 Basic stats: COMPLETE Column stats: PARTIAL
                   File Output Operator
@@ -150,7 +150,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
@@ -172,7 +172,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/union18.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/union18.q.out b/ql/src/test/results/clientpositive/spark/union18.q.out
index 4b6c32d..6b94c9d 100644
--- a/ql/src/test/results/clientpositive/spark/union18.q.out
+++ b/ql/src/test/results/clientpositive/spark/union18.q.out
@@ -98,7 +98,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/union19.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/union19.q.out b/ql/src/test/results/clientpositive/spark/union19.q.out
index 6d47270..ee6cca8 100644
--- a/ql/src/test/results/clientpositive/spark/union19.q.out
+++ b/ql/src/test/results/clientpositive/spark/union19.q.out
@@ -103,7 +103,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
@@ -140,7 +140,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: PARTIAL
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: PARTIAL
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/union20.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/union20.q.out b/ql/src/test/results/clientpositive/spark/union20.q.out
index b967408..ec6a36b 100644
--- a/ql/src/test/results/clientpositive/spark/union20.q.out
+++ b/ql/src/test/results/clientpositive/spark/union20.q.out
@@ -99,7 +99,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   Reduce Output Operator
@@ -134,7 +134,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   Reduce Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/union32.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/union32.q.out b/ql/src/test/results/clientpositive/spark/union32.q.out
index 925392b..a5bc7b8 100644
--- a/ql/src/test/results/clientpositive/spark/union32.q.out
+++ b/ql/src/test/results/clientpositive/spark/union32.q.out
@@ -508,7 +508,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToDouble(UDFToLong(_col0)) (type: double), UDFToString(CAST( _col1 AS CHAR(20))) (type: string)
+                  expressions: UDFToDouble(UDFToLong(_col0)) (type: double), CAST( CAST( _col1 AS CHAR(20)) AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -659,7 +659,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToDouble(UDFToLong(_col0)) (type: double), UDFToString(CAST( _col1 AS CHAR(20))) (type: string)
+                  expressions: UDFToDouble(UDFToLong(_col0)) (type: double), CAST( CAST( _col1 AS CHAR(20)) AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/union33.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/union33.q.out b/ql/src/test/results/clientpositive/spark/union33.q.out
index 190b6c0..3117c56 100644
--- a/ql/src/test/results/clientpositive/spark/union33.q.out
+++ b/ql/src/test/results/clientpositive/spark/union33.q.out
@@ -106,7 +106,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -265,7 +265,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/union6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/union6.q.out b/ql/src/test/results/clientpositive/spark/union6.q.out
index fca52a3..a942c1e 100644
--- a/ql/src/test/results/clientpositive/spark/union6.q.out
+++ b/ql/src/test/results/clientpositive/spark/union6.q.out
@@ -74,7 +74,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+                  expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/union_remove_19.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/union_remove_19.q.out b/ql/src/test/results/clientpositive/spark/union_remove_19.q.out
index bf8abf1..d9b2328 100644
--- a/ql/src/test/results/clientpositive/spark/union_remove_19.q.out
+++ b/ql/src/test/results/clientpositive/spark/union_remove_19.q.out
@@ -388,7 +388,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToString((UDFToDouble(_col0) + UDFToDouble(_col0))) (type: string), _col1 (type: bigint)
+                  expressions: CAST( (UDFToDouble(_col0) + UDFToDouble(_col0)) AS STRING) (type: string), _col1 (type: bigint)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 2 Data size: 600 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -409,7 +409,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToString((UDFToDouble(_col0) + UDFToDouble(_col0))) (type: string), _col1 (type: bigint)
+                  expressions: CAST( (UDFToDouble(_col0) + UDFToDouble(_col0)) AS STRING) (type: string), _col1 (type: bigint)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 2 Data size: 600 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/spark/vector_string_concat.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vector_string_concat.q.out b/ql/src/test/results/clientpositive/spark/vector_string_concat.q.out
index cee7995..d8beb20 100644
--- a/ql/src/test/results/clientpositive/spark/vector_string_concat.q.out
+++ b/ql/src/test/results/clientpositive/spark/vector_string_concat.q.out
@@ -342,7 +342,7 @@ STAGE PLANS:
                   TableScan Vectorization:
                       native: true
                   Select Operator
-                    expressions: concat(concat(concat('Quarter ', UDFToString(UDFToInteger(((UDFToDouble((month(dt) - 1)) / 3.0D) + 1.0D)))), '-'), UDFToString(year(dt))) (type: string)
+                    expressions: concat(concat(concat('Quarter ', CAST( UDFToInteger(((UDFToDouble((month(dt) - 1)) / 3.0D) + 1.0D)) AS STRING)), '-'), CAST( year(dt) AS STRING)) (type: string)
                     outputColumnNames: _col0
                     Select Vectorization:
                         className: VectorSelectOperator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/stats1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/stats1.q.out b/ql/src/test/results/clientpositive/stats1.q.out
index 10291ce..d77d94e 100644
--- a/ql/src/test/results/clientpositive/stats1.q.out
+++ b/ql/src/test/results/clientpositive/stats1.q.out
@@ -52,7 +52,7 @@ STAGE PLANS:
           outputColumnNames: _col0
           Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
-            expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+            expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/tablevalues.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tablevalues.q.out b/ql/src/test/results/clientpositive/tablevalues.q.out
index 74fda00..d364934 100644
--- a/ql/src/test/results/clientpositive/tablevalues.q.out
+++ b/ql/src/test/results/clientpositive/tablevalues.q.out
@@ -57,7 +57,7 @@ STAGE PLANS:
                   predicate: (col2 = 9) (type: boolean)
                   Statistics: Num rows: 1 Data size: 640 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
-                    expressions: UDFToString(col1) (type: string), '9' (type: string)
+                    expressions: CAST( col1 AS STRING) (type: string), '9' (type: string)
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 1 Data size: 269 Basic stats: COMPLETE Column stats: COMPLETE
                     File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/udf3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf3.q.out b/ql/src/test/results/clientpositive/udf3.q.out
index 0f7c859..a016510 100644
--- a/ql/src/test/results/clientpositive/udf3.q.out
+++ b/ql/src/test/results/clientpositive/udf3.q.out
@@ -45,7 +45,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1, _col2, _col3
           Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
-            expressions: UDFToString(_col0) (type: string), UDFToString(_col1) (type: string), UDFToString((UDFToDouble(_col1) / _col0)) (type: string), UDFToString(_col2) (type: string), UDFToString(_col3) (type: string)
+            expressions: CAST( _col0 AS STRING) (type: string), CAST( _col1 AS STRING) (type: string), CAST( (UDFToDouble(_col1) / _col0) AS STRING) (type: string), CAST( _col2 AS STRING) (type: string), CAST( _col3 AS STRING) (type: string)
             outputColumnNames: _col0, _col1, _col2, _col3, _col4
             Statistics: Num rows: 1 Data size: 920 Basic stats: COMPLETE Column stats: COMPLETE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/udf_string.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf_string.q.out b/ql/src/test/results/clientpositive/udf_string.q.out
index 71b9b29..aa764a9 100644
--- a/ql/src/test/results/clientpositive/udf_string.q.out
+++ b/ql/src/test/results/clientpositive/udf_string.q.out
@@ -2,11 +2,14 @@ PREHOOK: query: DESCRIBE FUNCTION string
 PREHOOK: type: DESCFUNCTION
 POSTHOOK: query: DESCRIBE FUNCTION string
 POSTHOOK: type: DESCFUNCTION
-There is no documentation for function 'string'
+CAST(<value> as STRING) - Converts the argument to a string value.
 PREHOOK: query: DESCRIBE FUNCTION EXTENDED string
 PREHOOK: type: DESCFUNCTION
 POSTHOOK: query: DESCRIBE FUNCTION EXTENDED string
 POSTHOOK: type: DESCFUNCTION
-There is no documentation for function 'string'
-Function class:org.apache.hadoop.hive.ql.udf.UDFToString
+CAST(<value> as STRING) - Converts the argument to a string value.
+Example:
+   > SELECT CAST(1234 AS string) FROM src LIMIT 1;
+  '1234'
+Function class:org.apache.hadoop.hive.ql.udf.generic.GenericUDFToString
 Function type:BUILTIN

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/union17.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/union17.q.out b/ql/src/test/results/clientpositive/union17.q.out
index b7748c0..8f1a2d9 100644
--- a/ql/src/test/results/clientpositive/union17.q.out
+++ b/ql/src/test/results/clientpositive/union17.q.out
@@ -65,7 +65,7 @@ STAGE PLANS:
           outputColumnNames: _col0
           Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
-            expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+            expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
             File Output Operator
@@ -144,7 +144,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: PARTIAL
           Select Operator
-            expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+            expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: PARTIAL
             File Output Operator
@@ -238,7 +238,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1, _col2
           Statistics: Num rows: 1 Data size: 280 Basic stats: COMPLETE Column stats: PARTIAL
           Select Operator
-            expressions: _col0 (type: string), _col1 (type: string), UDFToString(_col2) (type: string)
+            expressions: _col0 (type: string), _col1 (type: string), CAST( _col2 AS STRING) (type: string)
             outputColumnNames: _col0, _col1, _col2
             Statistics: Num rows: 1 Data size: 456 Basic stats: COMPLETE Column stats: PARTIAL
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/union18.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/union18.q.out b/ql/src/test/results/clientpositive/union18.q.out
index 109fa8d..5e456dd 100644
--- a/ql/src/test/results/clientpositive/union18.q.out
+++ b/ql/src/test/results/clientpositive/union18.q.out
@@ -68,7 +68,7 @@ STAGE PLANS:
           outputColumnNames: _col0
           Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
-            expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+            expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/union19.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/union19.q.out b/ql/src/test/results/clientpositive/union19.q.out
index f57d8fb..e5c898d 100644
--- a/ql/src/test/results/clientpositive/union19.q.out
+++ b/ql/src/test/results/clientpositive/union19.q.out
@@ -64,7 +64,7 @@ STAGE PLANS:
           outputColumnNames: _col0
           Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
-            expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+            expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
             File Output Operator
@@ -175,7 +175,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: PARTIAL
           Select Operator
-            expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+            expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: PARTIAL
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/union20.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/union20.q.out b/ql/src/test/results/clientpositive/union20.q.out
index 6cc5eff..030dc7f 100644
--- a/ql/src/test/results/clientpositive/union20.q.out
+++ b/ql/src/test/results/clientpositive/union20.q.out
@@ -52,7 +52,7 @@ STAGE PLANS:
           outputColumnNames: _col0
           Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
-            expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+            expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
             File Output Operator
@@ -163,7 +163,7 @@ STAGE PLANS:
           outputColumnNames: _col0
           Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
-            expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+            expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/union32.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/union32.q.out b/ql/src/test/results/clientpositive/union32.q.out
index 92ed7d1..72b2cd0 100644
--- a/ql/src/test/results/clientpositive/union32.q.out
+++ b/ql/src/test/results/clientpositive/union32.q.out
@@ -494,7 +494,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToDouble(UDFToLong(_col0)) (type: double), UDFToString(CAST( _col1 AS CHAR(20))) (type: string)
+            expressions: UDFToDouble(UDFToLong(_col0)) (type: double), CAST( CAST( _col1 AS CHAR(20)) AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -651,7 +651,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToDouble(UDFToLong(_col0)) (type: double), UDFToString(CAST( _col1 AS CHAR(20))) (type: string)
+            expressions: UDFToDouble(UDFToLong(_col0)) (type: double), CAST( CAST( _col1 AS CHAR(20)) AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/union33.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/union33.q.out b/ql/src/test/results/clientpositive/union33.q.out
index 1b8b35b..57c5308 100644
--- a/ql/src/test/results/clientpositive/union33.q.out
+++ b/ql/src/test/results/clientpositive/union33.q.out
@@ -93,7 +93,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+            expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -355,7 +355,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+            expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/union6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/union6.q.out b/ql/src/test/results/clientpositive/union6.q.out
index 37c7521..4822437 100644
--- a/ql/src/test/results/clientpositive/union6.q.out
+++ b/ql/src/test/results/clientpositive/union6.q.out
@@ -55,7 +55,7 @@ STAGE PLANS:
           outputColumnNames: _col0
           Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
-            expressions: 'tst1' (type: string), UDFToString(_col0) (type: string)
+            expressions: 'tst1' (type: string), CAST( _col0 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 1 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/union_remove_19.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/union_remove_19.q.out b/ql/src/test/results/clientpositive/union_remove_19.q.out
index 0c67e67..4e48312 100644
--- a/ql/src/test/results/clientpositive/union_remove_19.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_19.q.out
@@ -411,7 +411,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToString((UDFToDouble(_col0) + UDFToDouble(_col0))) (type: string), _col1 (type: bigint)
+            expressions: CAST( (UDFToDouble(_col0) + UDFToDouble(_col0)) AS STRING) (type: string), _col1 (type: bigint)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -464,7 +464,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: UDFToString((UDFToDouble(_col0) + UDFToDouble(_col0))) (type: string), _col1 (type: bigint)
+            expressions: CAST( (UDFToDouble(_col0) + UDFToDouble(_col0)) AS STRING) (type: string), _col1 (type: bigint)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE
             File Output Operator


[02/11] hive git commit: HIVE-19733: RemoteSparkJobStatus#getSparkStageProgress inefficient implementation (Bharathkrishna Guruvayoor Murali, reviewed by Sahil Takiar)

Posted by se...@apache.org.
HIVE-19733: RemoteSparkJobStatus#getSparkStageProgress inefficient implementation (Bharathkrishna Guruvayoor Murali, reviewed by Sahil Takiar)
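
The diff below replaces the per-stage GetStageInfoJob round trips with a single GetSparkStagesInfoJob that returns the stage infos for the whole Spark job in one remote call. A minimal before/after sketch of that call pattern, using hypothetical stand-in types rather than the real Hive/Spark classes:

// Illustrative sketch only; StageInfoClient is a hypothetical stand-in,
// not the Hive or Spark API. It contrasts one RPC per stage (old pattern)
// with one RPC per job (new pattern), as in the diff that follows.
import java.util.ArrayList;
import java.util.List;

public class StageInfoBatchingSketch {

  /** Hypothetical remote client. */
  interface StageInfoClient {
    String fetchStageInfo(int stageId);          // one remote call per stage
    List<String> fetchAllStageInfo(int jobId);   // one remote call for the whole job
  }

  /** Old pattern: N stages -> N remote calls. */
  static List<String> perStage(StageInfoClient client, int[] stageIds) {
    List<String> infos = new ArrayList<>();
    for (int stageId : stageIds) {
      infos.add(client.fetchStageInfo(stageId));
    }
    return infos;
  }

  /** New pattern: a single batched remote call returns every stage's info. */
  static List<String> batched(StageInfoClient client, int jobId) {
    return client.fetchAllStageInfo(jobId);
  }
}
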


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ed4fa73b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ed4fa73b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ed4fa73b

Branch: refs/heads/master-txnstats
Commit: ed4fa73ba740026ac0d4297d6a45432dc60d1073
Parents: 4e9562f
Author: Bharathkrishna Guruvayoor Murali <bh...@cloudera.com>
Authored: Mon Jul 23 18:35:04 2018 -0500
Committer: Sahil Takiar <st...@cloudera.com>
Committed: Mon Jul 23 18:35:41 2018 -0500

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/ql/ErrorMsg.java     |   3 +
 .../spark/status/impl/RemoteSparkJobStatus.java | 108 +++++++++++++------
 2 files changed, 78 insertions(+), 33 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/ed4fa73b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index 06d0ed3..37bc153 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -599,6 +599,9 @@ public enum ErrorMsg {
   SPARK_GET_JOB_INFO_EXECUTIONERROR(30046, "Spark job failed in execution while getting job info due to exception {0}"),
 
   REPL_FILE_SYSTEM_OPERATION_RETRY(30047, "Replication file system operation retry expired."),
+  SPARK_GET_STAGES_INFO_TIMEOUT(30048, "Spark job GetSparkStagesInfoJob timed out after {0} seconds.", true),
+  SPARK_GET_STAGES_INFO_INTERRUPTED(30049, "Spark job GetSparkStagesInfoJob was interrupted."),
+  SPARK_GET_STAGES_INFO_EXECUTIONERROR(30050, "Spark job GetSparkStagesInfoJob failed in execution while getting job info due to exception {0}", true),
 
   //========================== 40000 range starts here ========================//
 

http://git-wip-us.apache.org/repos/asf/hive/blob/ed4fa73b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
index 832832b..3d41443 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
@@ -45,6 +45,7 @@ import org.apache.spark.SparkStageInfo;
 import org.apache.spark.api.java.JavaFutureAction;
 
 import java.io.Serializable;
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -103,18 +104,20 @@ public class RemoteSparkJobStatus implements SparkJobStatus {
 
   @Override
   public Map<SparkStage, SparkStageProgress> getSparkStageProgress() throws HiveException {
+    List<SparkStageInfo> sparkStagesInfo = getSparkStagesInfo();
     Map<SparkStage, SparkStageProgress> stageProgresses = new HashMap<SparkStage, SparkStageProgress>();
-    for (int stageId : getStageIds()) {
-      SparkStageInfo sparkStageInfo = getSparkStageInfo(stageId);
-      if (sparkStageInfo != null && sparkStageInfo.name() != null) {
-        int runningTaskCount = sparkStageInfo.numActiveTasks();
-        int completedTaskCount = sparkStageInfo.numCompletedTasks();
-        int failedTaskCount = sparkStageInfo.numFailedTasks();
-        int totalTaskCount = sparkStageInfo.numTasks();
-        SparkStageProgress sparkStageProgress = new SparkStageProgress(
-            totalTaskCount, completedTaskCount, runningTaskCount, failedTaskCount);
-        SparkStage stage = new SparkStage(sparkStageInfo.stageId(), sparkStageInfo.currentAttemptId());
-        stageProgresses.put(stage, sparkStageProgress);
+    if (sparkStagesInfo != null) {
+      for (SparkStageInfo sparkStageInfo : sparkStagesInfo) {
+        if (sparkStageInfo != null && sparkStageInfo.name() != null) {
+          int runningTaskCount = sparkStageInfo.numActiveTasks();
+          int completedTaskCount = sparkStageInfo.numCompletedTasks();
+          int failedTaskCount = sparkStageInfo.numFailedTasks();
+          int totalTaskCount = sparkStageInfo.numTasks();
+          SparkStageProgress sparkStageProgress =
+              new SparkStageProgress(totalTaskCount, completedTaskCount, runningTaskCount, failedTaskCount);
+          SparkStage stage = new SparkStage(sparkStageInfo.stageId(), sparkStageInfo.currentAttemptId());
+          stageProgresses.put(stage, sparkStageProgress);
+        }
       }
     }
     return stageProgresses;
@@ -212,14 +215,26 @@ public class RemoteSparkJobStatus implements SparkJobStatus {
     }
   }
 
-  private SparkStageInfo getSparkStageInfo(int stageId) {
-    Future<SparkStageInfo> getStageInfo = sparkClient.run(new GetStageInfoJob(stageId));
-    try {
-      return getStageInfo.get(sparkClientTimeoutInSeconds, TimeUnit.SECONDS);
-    } catch (Throwable t) {
-      LOG.warn("Error getting stage info", t);
+  private List<SparkStageInfo> getSparkStagesInfo()throws HiveException {
+
+    Integer sparkJobId = jobHandle.getSparkJobIds().size() == 1
+        ? jobHandle.getSparkJobIds().get(0) : null;
+    if (sparkJobId == null) {
       return null;
     }
+    Future<ArrayList<SparkStageInfo>> getStagesInfo = sparkClient.run(
+        new GetSparkStagesInfoJob(jobHandle.getClientJobId(), sparkJobId));
+    try {
+      return getStagesInfo.get(sparkClientTimeoutInSeconds, TimeUnit.SECONDS);
+    } catch (TimeoutException e) {
+      throw new HiveException(e, ErrorMsg.SPARK_GET_STAGES_INFO_TIMEOUT,
+          Long.toString(sparkClientTimeoutInSeconds));
+    } catch (InterruptedException e) {
+      throw new HiveException(e, ErrorMsg.SPARK_GET_STAGES_INFO_INTERRUPTED);
+    } catch (ExecutionException e) {
+      throw new HiveException(e, ErrorMsg.SPARK_GET_STAGES_INFO_EXECUTIONERROR,
+          Throwables.getRootCause(e).getMessage());
+    }
   }
 
   public JobHandle.State getRemoteJobState() {
@@ -229,25 +244,24 @@ public class RemoteSparkJobStatus implements SparkJobStatus {
     return jobHandle.getState();
   }
 
-  private static class GetJobInfoJob implements Job<SparkJobInfo> {
+  private static class GetSparkStagesInfoJob implements Job<ArrayList<SparkStageInfo>> {
     private final String clientJobId;
     private final int sparkJobId;
 
-    private GetJobInfoJob() {
+    private GetSparkStagesInfoJob() {
       // For serialization.
       this(null, -1);
     }
 
-    GetJobInfoJob(String clientJobId, int sparkJobId) {
+    GetSparkStagesInfoJob(String clientJobId, int sparkJobId) {
       this.clientJobId = clientJobId;
       this.sparkJobId = sparkJobId;
     }
-
     @Override
-    public SparkJobInfo call(JobContext jc) throws Exception {
+    public ArrayList<SparkStageInfo> call(JobContext jc) throws Exception {
       SparkJobInfo jobInfo = jc.sc().statusTracker().getJobInfo(sparkJobId);
       if (jobInfo == null) {
-        List<JavaFutureAction<?>> list = jc.getMonitoredJobs().get(clientJobId);
+        ArrayList<JavaFutureAction<?>> list = new ArrayList<>(jc.getMonitoredJobs().get(clientJobId));
         if (list != null && list.size() == 1) {
           JavaFutureAction<?> futureAction = list.get(0);
           if (futureAction.isDone()) {
@@ -266,25 +280,53 @@ public class RemoteSparkJobStatus implements SparkJobStatus {
       if (jobInfo == null) {
         jobInfo = getDefaultJobInfo(sparkJobId, JobExecutionStatus.UNKNOWN);
       }
-      return jobInfo;
+      ArrayList<SparkStageInfo> sparkStageInfos = new ArrayList<>();
+      int[] stageIds = jobInfo.stageIds();
+      for(Integer stageid : stageIds) {
+        SparkStageInfo stageInfo = jc.sc().statusTracker().getStageInfo(stageid);
+        sparkStageInfos.add(stageInfo);
+      }
+      return sparkStageInfos;
     }
   }
+  private static class GetJobInfoJob implements Job<SparkJobInfo> {
+    private final String clientJobId;
+    private final int sparkJobId;
 
-  private static class GetStageInfoJob implements Job<SparkStageInfo> {
-    private final int stageId;
-
-    private GetStageInfoJob() {
+    private GetJobInfoJob() {
       // For serialization.
-      this(-1);
+      this(null, -1);
     }
 
-    GetStageInfoJob(int stageId) {
-      this.stageId = stageId;
+    GetJobInfoJob(String clientJobId, int sparkJobId) {
+      this.clientJobId = clientJobId;
+      this.sparkJobId = sparkJobId;
     }
 
     @Override
-    public SparkStageInfo call(JobContext jc) throws Exception {
-      return jc.sc().statusTracker().getStageInfo(stageId);
+    public SparkJobInfo call(JobContext jc) throws Exception {
+      SparkJobInfo jobInfo = jc.sc().statusTracker().getJobInfo(sparkJobId);
+      if (jobInfo == null) {
+        List<JavaFutureAction<?>> list = jc.getMonitoredJobs().get(clientJobId);
+        if (list != null && list.size() == 1) {
+          JavaFutureAction<?> futureAction = list.get(0);
+          if (futureAction.isDone()) {
+            boolean futureSucceed = true;
+            try {
+              futureAction.get();
+            } catch (Exception e) {
+              LOG.error("Failed to run job " + sparkJobId, e);
+              futureSucceed = false;
+            }
+            jobInfo = getDefaultJobInfo(sparkJobId,
+                futureSucceed ? JobExecutionStatus.SUCCEEDED : JobExecutionStatus.FAILED);
+          }
+        }
+      }
+      if (jobInfo == null) {
+        jobInfo = getDefaultJobInfo(sparkJobId, JobExecutionStatus.UNKNOWN);
+      }
+      return jobInfo;
     }
   }
 


[08/11] hive git commit: HIVE-20082: HiveDecimal to string conversion doesn't format the decimal correctly (Jason Dere, reviewed by Ashutosh Chauhan)

Posted by se...@apache.org.
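
This part of the change only touches golden plan output: UDFToString(...) is rewritten as CAST(... AS STRING) in the EXPLAIN results below. As a generic illustration of why decimal-to-string formatting needs care, and explicitly not Hive's HiveDecimal code, a java.math.BigDecimal sketch:

// Illustration only: java.math.BigDecimal behavior, not Hive's actual fix.
import java.math.BigDecimal;

public class DecimalToStringSketch {
  public static void main(String[] args) {
    BigDecimal hundred = BigDecimal.valueOf(1, -2);    // unscaled 1, scale -2 == 100
    System.out.println(hundred.toString());            // "1E+2" (scientific notation)
    System.out.println(hundred.toPlainString());       // "100"  (plain form)

    BigDecimal thirtyCents = new BigDecimal("0.30");
    System.out.println(thirtyCents.toString());                       // "0.30"
    System.out.println(thirtyCents.stripTrailingZeros().toString());  // "0.3"
  }
}
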
http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/infer_bucket_sort_map_operators.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/infer_bucket_sort_map_operators.q.out b/ql/src/test/results/clientpositive/infer_bucket_sort_map_operators.q.out
index 254ac14..178eb94 100644
--- a/ql/src/test/results/clientpositive/infer_bucket_sort_map_operators.q.out
+++ b/ql/src/test/results/clientpositive/infer_bucket_sort_map_operators.q.out
@@ -80,7 +80,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -309,7 +309,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+            expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -680,7 +680,7 @@ STAGE PLANS:
           mode: mergepartial
           outputColumnNames: _col0, _col1
           Select Operator
-            expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+            expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
             outputColumnNames: _col0, _col1
             File Output Operator
               compressed: false

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/infer_bucket_sort_num_buckets.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/infer_bucket_sort_num_buckets.q.out b/ql/src/test/results/clientpositive/infer_bucket_sort_num_buckets.q.out
index f8445e6..4062fca 100644
--- a/ql/src/test/results/clientpositive/infer_bucket_sort_num_buckets.q.out
+++ b/ql/src/test/results/clientpositive/infer_bucket_sort_num_buckets.q.out
@@ -60,7 +60,7 @@ STAGE PLANS:
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: default.test_table_n0
           Select Operator
-            expressions: _col0 (type: int), _col1 (type: string), UDFToString(_col2) (type: string)
+            expressions: _col0 (type: int), _col1 (type: string), CAST( _col2 AS STRING) (type: string)
             outputColumnNames: key, value, hr
             Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
             Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/input_part10.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/input_part10.q.out b/ql/src/test/results/clientpositive/input_part10.q.out
index 4484cd0..4038a55 100644
--- a/ql/src/test/results/clientpositive/input_part10.q.out
+++ b/ql/src/test/results/clientpositive/input_part10.q.out
@@ -60,7 +60,7 @@ STAGE PLANS:
             Number of rows: 1
             Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
             Select Operator
-              expressions: UDFToString(_col0) (type: string), UDFToString(_col1) (type: string)
+              expressions: CAST( _col0 AS STRING) (type: string), CAST( _col1 AS STRING) (type: string)
               outputColumnNames: _col0, _col1
               Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: COMPLETE
               File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/bucketmapjoin1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/bucketmapjoin1.q.out b/ql/src/test/results/clientpositive/llap/bucketmapjoin1.q.out
index e9c84b2..c70bb07 100644
--- a/ql/src/test/results/clientpositive/llap/bucketmapjoin1.q.out
+++ b/ql/src/test/results/clientpositive/llap/bucketmapjoin1.q.out
@@ -601,7 +601,7 @@ STAGE PLANS:
                 Position of Big Table: 1
                 Statistics: Num rows: 156 Data size: 89111 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 156 Data size: 89111 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator
@@ -1035,7 +1035,7 @@ STAGE PLANS:
                 Position of Big Table: 1
                 Statistics: Num rows: 156 Data size: 89111 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 156 Data size: 89111 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/bucketmapjoin2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/bucketmapjoin2.q.out b/ql/src/test/results/clientpositive/llap/bucketmapjoin2.q.out
index 7532e74..95ad77b 100644
--- a/ql/src/test/results/clientpositive/llap/bucketmapjoin2.q.out
+++ b/ql/src/test/results/clientpositive/llap/bucketmapjoin2.q.out
@@ -296,7 +296,7 @@ STAGE PLANS:
                 Position of Big Table: 0
                 Statistics: Num rows: 156 Data size: 89111 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 156 Data size: 89111 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator
@@ -735,7 +735,7 @@ STAGE PLANS:
                 Position of Big Table: 0
                 Statistics: Num rows: 156 Data size: 89111 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 156 Data size: 89111 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator
@@ -1241,7 +1241,7 @@ STAGE PLANS:
                 Position of Big Table: 1
                 Statistics: Num rows: 163 Data size: 93968 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 163 Data size: 93968 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/bucketmapjoin3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/bucketmapjoin3.q.out b/ql/src/test/results/clientpositive/llap/bucketmapjoin3.q.out
index 02169d8..bb6386e 100644
--- a/ql/src/test/results/clientpositive/llap/bucketmapjoin3.q.out
+++ b/ql/src/test/results/clientpositive/llap/bucketmapjoin3.q.out
@@ -320,7 +320,7 @@ STAGE PLANS:
                 Position of Big Table: 1
                 Statistics: Num rows: 156 Data size: 89111 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col4 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col4 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 156 Data size: 89111 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator
@@ -759,7 +759,7 @@ STAGE PLANS:
                 Position of Big Table: 1
                 Statistics: Num rows: 156 Data size: 89111 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col4 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col4 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 156 Data size: 89111 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/bucketmapjoin4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/bucketmapjoin4.q.out b/ql/src/test/results/clientpositive/llap/bucketmapjoin4.q.out
index 42b95cf..58ea699 100644
--- a/ql/src/test/results/clientpositive/llap/bucketmapjoin4.q.out
+++ b/ql/src/test/results/clientpositive/llap/bucketmapjoin4.q.out
@@ -318,7 +318,7 @@ STAGE PLANS:
                 Position of Big Table: 0
                 Statistics: Num rows: 1 Data size: 206 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 1 Data size: 206 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -743,7 +743,7 @@ STAGE PLANS:
                 Position of Big Table: 0
                 Statistics: Num rows: 1 Data size: 206 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 1 Data size: 206 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/default_constraint.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/default_constraint.q.out b/ql/src/test/results/clientpositive/llap/default_constraint.q.out
index f93eb1f..f188ba4 100644
--- a/ql/src/test/results/clientpositive/llap/default_constraint.q.out
+++ b/ql/src/test/results/clientpositive/llap/default_constraint.q.out
@@ -1151,7 +1151,7 @@ STAGE PLANS:
                       Statistics: Num rows: 1 Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                       function name: inline
                       Select Operator
-                        expressions: UDFToInteger(UDFToDouble(4)) (type: int), UDFToBoolean('true') (type: boolean), UDFToInteger(5.67) (type: int), UDFToByte(45) (type: tinyint), UDFToFloat(45.4) (type: float), UDFToLong(567) (type: bigint), UDFToShort(88) (type: smallint), CAST( CURRENT_TIMESTAMP() AS varchar(50)) (type: varchar(50)), UDFToString(CAST( CURRENT_USER() AS varchar(50))) (type: string), CAST( '2016-01-03 12:26:34 America/Los_Angeles' AS timestamp with local time zone) (type: timestamp with local time zone), CAST( '2016-01-01 12:01:01' AS TIMESTAMP) (type: timestamp), CAST( 4.5 AS decimal(8,2)) (type: decimal(8,2)), UDFToDouble(5) (type: double), CAST( col1 AS CHAR(2)) (type: char(2))
+                        expressions: UDFToInteger(UDFToDouble(4)) (type: int), UDFToBoolean('true') (type: boolean), UDFToInteger(5.67) (type: int), UDFToByte(45) (type: tinyint), UDFToFloat(45.4) (type: float), UDFToLong(567) (type: bigint), UDFToShort(88) (type: smallint), CAST( CURRENT_TIMESTAMP() AS varchar(50)) (type: varchar(50)), CAST( CAST( CURRENT_USER() AS varchar(50)) AS STRING) (type: string), CAST( '2016-01-03 12:26:34 America/Los_Angeles' AS timestamp with local time zone) (type: timestamp with local time zone), CAST( '2016-01-01 12:01:01' AS TIMESTAMP) (type: timestamp), CAST( 4.5 AS decimal(8,2)) (type: decimal(8,2)), UDFToDouble(5) (type: double), CAST( col1 AS CHAR(2)) (type: char(2))
                         outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
                         Statistics: Num rows: 1 Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                         File Output Operator
@@ -1237,7 +1237,7 @@ STAGE PLANS:
                       Statistics: Num rows: 1 Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                       function name: inline
                       Select Operator
-                        expressions: UDFToInteger(UDFToDouble(4)) (type: int), col1 (type: boolean), UDFToInteger(5.67) (type: int), UDFToByte(45) (type: tinyint), UDFToFloat(45.4) (type: float), UDFToLong(567) (type: bigint), UDFToShort(88) (type: smallint), CAST( CURRENT_TIMESTAMP() AS varchar(50)) (type: varchar(50)), UDFToString(CAST( CURRENT_USER() AS varchar(50))) (type: string), CAST( '2016-01-03 12:26:34 America/Los_Angeles' AS timestamp with local time zone) (type: timestamp with local time zone), CAST( '2016-01-01 12:01:01' AS TIMESTAMP) (type: timestamp), CAST( 4.5 AS decimal(8,2)) (type: decimal(8,2)), UDFToDouble(5) (type: double), CAST( col2 AS CHAR(2)) (type: char(2))
+                        expressions: UDFToInteger(UDFToDouble(4)) (type: int), col1 (type: boolean), UDFToInteger(5.67) (type: int), UDFToByte(45) (type: tinyint), UDFToFloat(45.4) (type: float), UDFToLong(567) (type: bigint), UDFToShort(88) (type: smallint), CAST( CURRENT_TIMESTAMP() AS varchar(50)) (type: varchar(50)), CAST( CAST( CURRENT_USER() AS varchar(50)) AS STRING) (type: string), CAST( '2016-01-03 12:26:34 America/Los_Angeles' AS timestamp with local time zone) (type: timestamp with local time zone), CAST( '2016-01-01 12:01:01' AS TIMESTAMP) (type: timestamp), CAST( 4.5 AS decimal(8,2)) (type: decimal(8,2)), UDFToDouble(5) (type: double), CAST( col2 AS CHAR(2)) (type: char(2))
                         outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
                         Statistics: Num rows: 1 Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                         File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out b/ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out
index 1133b54..8f06ee5 100644
--- a/ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out
+++ b/ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out
@@ -2099,9 +2099,9 @@ STAGE PLANS:
                     outputColumnNames: _col0
                     Statistics: Num rows: 2000 Data size: 368000 Basic stats: COMPLETE Column stats: COMPLETE
                     Reduce Output Operator
-                      key expressions: UDFToString((UDFToDouble(_col0) * 2.0D)) (type: string)
+                      key expressions: CAST( (UDFToDouble(_col0) * 2.0D) AS STRING) (type: string)
                       sort order: +
-                      Map-reduce partition columns: UDFToString((UDFToDouble(_col0) * 2.0D)) (type: string)
+                      Map-reduce partition columns: CAST( (UDFToDouble(_col0) * 2.0D) AS STRING) (type: string)
                       Statistics: Num rows: 2000 Data size: 368000 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
@@ -2119,12 +2119,12 @@ STAGE PLANS:
                       outputColumnNames: _col0
                       Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
-                        key expressions: UDFToString(_col0) (type: string)
+                        key expressions: CAST( _col0 AS STRING) (type: string)
                         sort order: +
-                        Map-reduce partition columns: UDFToString(_col0) (type: string)
+                        Map-reduce partition columns: CAST( _col0 AS STRING) (type: string)
                         Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
-                        expressions: UDFToString(_col0) (type: string)
+                        expressions: CAST( _col0 AS STRING) (type: string)
                         outputColumnNames: _col0
                         Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
                         Group By Operator
@@ -2135,7 +2135,7 @@ STAGE PLANS:
                           Dynamic Partitioning Event Operator
                             Target column: hr (string)
                             Target Input: srcpart
-                            Partition key expr: UDFToString((UDFToDouble(hr) * 2.0D))
+                            Partition key expr: CAST( (UDFToDouble(hr) * 2.0D) AS STRING)
                             Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
                             Target Vertex: Map 1
             Execution mode: llap
@@ -2147,8 +2147,8 @@ STAGE PLANS:
                 condition map:
                      Inner Join 0 to 1
                 keys:
-                  0 UDFToString((UDFToDouble(_col0) * 2.0D)) (type: string)
-                  1 UDFToString(_col0) (type: string)
+                  0 CAST( (UDFToDouble(_col0) * 2.0D) AS STRING) (type: string)
+                  1 CAST( _col0 AS STRING) (type: string)
                 Statistics: Num rows: 2200 Data size: 404800 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
                   aggregations: count()

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/insert_into_default_keyword.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/insert_into_default_keyword.q.out b/ql/src/test/results/clientpositive/llap/insert_into_default_keyword.q.out
index 4c82e09..29b7262 100644
--- a/ql/src/test/results/clientpositive/llap/insert_into_default_keyword.q.out
+++ b/ql/src/test/results/clientpositive/llap/insert_into_default_keyword.q.out
@@ -59,7 +59,7 @@ STAGE PLANS:
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
-                expressions: UDFToInteger(VALUE._col0) (type: int), UDFToString(VALUE._col0) (type: string)
+                expressions: UDFToInteger(VALUE._col0) (type: int), CAST( VALUE._col0 AS STRING) (type: string)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
@@ -194,7 +194,7 @@ STAGE PLANS:
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
-                expressions: VALUE._col0 (type: int), UDFToString(VALUE._col1) (type: string)
+                expressions: VALUE._col0 (type: int), CAST( VALUE._col1 AS STRING) (type: string)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
@@ -329,7 +329,7 @@ STAGE PLANS:
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
-                expressions: VALUE._col0 (type: int), UDFToString(VALUE._col1) (type: string)
+                expressions: VALUE._col0 (type: int), CAST( VALUE._col1 AS STRING) (type: string)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
@@ -600,7 +600,7 @@ STAGE PLANS:
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
-                expressions: VALUE._col0 (type: int), UDFToString(VALUE._col1) (type: string)
+                expressions: VALUE._col0 (type: int), CAST( VALUE._col1 AS STRING) (type: string)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
@@ -753,7 +753,7 @@ STAGE PLANS:
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
-                expressions: VALUE._col0 (type: int), UDFToString(VALUE._col1) (type: string)
+                expressions: VALUE._col0 (type: int), CAST( VALUE._col1 AS STRING) (type: string)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
@@ -888,7 +888,7 @@ STAGE PLANS:
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
-                expressions: VALUE._col0 (type: int), UDFToString(VALUE._col1) (type: string)
+                expressions: VALUE._col0 (type: int), CAST( VALUE._col1 AS STRING) (type: string)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
@@ -1023,7 +1023,7 @@ STAGE PLANS:
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
-                expressions: VALUE._col0 (type: int), UDFToString(VALUE._col1) (type: string)
+                expressions: VALUE._col0 (type: int), CAST( VALUE._col1 AS STRING) (type: string)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
@@ -1294,7 +1294,7 @@ STAGE PLANS:
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
-                expressions: VALUE._col0 (type: int), UDFToString(VALUE._col1) (type: string)
+                expressions: VALUE._col0 (type: int), CAST( VALUE._col1 AS STRING) (type: string)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
@@ -1429,7 +1429,7 @@ STAGE PLANS:
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
-                expressions: VALUE._col0 (type: int), UDFToString(VALUE._col1) (type: string)
+                expressions: VALUE._col0 (type: int), CAST( VALUE._col1 AS STRING) (type: string)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
@@ -1564,7 +1564,7 @@ STAGE PLANS:
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
-                expressions: VALUE._col0 (type: int), UDFToString(VALUE._col1) (type: string)
+                expressions: VALUE._col0 (type: int), CAST( VALUE._col1 AS STRING) (type: string)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_6.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_6.q.out
index 8a52138..eff436e 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_6.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_6.q.out
@@ -826,10 +826,10 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: default.mv1
-                  filterExpr: (UDFToString(name1) = 'Bill') (type: boolean)
+                  filterExpr: (CAST( name1 AS STRING) = 'Bill') (type: boolean)
                   Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
-                    predicate: (UDFToString(name1) = 'Bill') (type: boolean)
+                    predicate: (CAST( name1 AS STRING) = 'Bill') (type: boolean)
                     Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: empid (type: int), deptno (type: int)

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/multi_insert_lateral_view.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/multi_insert_lateral_view.q.out b/ql/src/test/results/clientpositive/llap/multi_insert_lateral_view.q.out
index 74a6945..041beca 100644
--- a/ql/src/test/results/clientpositive/llap/multi_insert_lateral_view.q.out
+++ b/ql/src/test/results/clientpositive/llap/multi_insert_lateral_view.q.out
@@ -76,7 +76,7 @@ STAGE PLANS:
                         outputColumnNames: _col0, _col5
                         Statistics: Num rows: 20 Data size: 3680 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col0 (type: string), UDFToString(_col5) (type: string)
+                          expressions: _col0 (type: string), CAST( _col5 AS STRING) (type: string)
                           outputColumnNames: _col0, _col1
                           Statistics: Num rows: 20 Data size: 3680 Basic stats: COMPLETE Column stats: NONE
                           File Output Operator
@@ -111,7 +111,7 @@ STAGE PLANS:
                           outputColumnNames: _col0, _col5
                           Statistics: Num rows: 20 Data size: 3680 Basic stats: COMPLETE Column stats: NONE
                           Select Operator
-                            expressions: _col0 (type: string), UDFToString(_col5) (type: string)
+                            expressions: _col0 (type: string), CAST( _col5 AS STRING) (type: string)
                             outputColumnNames: _col0, _col1
                             Statistics: Num rows: 20 Data size: 3680 Basic stats: COMPLETE Column stats: NONE
                             File Output Operator
@@ -145,7 +145,7 @@ STAGE PLANS:
                         outputColumnNames: _col0, _col5
                         Statistics: Num rows: 20 Data size: 3680 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col0 (type: string), UDFToString(_col5) (type: string)
+                          expressions: _col0 (type: string), CAST( _col5 AS STRING) (type: string)
                           outputColumnNames: _col0, _col1
                           Statistics: Num rows: 20 Data size: 3680 Basic stats: COMPLETE Column stats: NONE
                           File Output Operator
@@ -180,7 +180,7 @@ STAGE PLANS:
                           outputColumnNames: _col0, _col5
                           Statistics: Num rows: 20 Data size: 3680 Basic stats: COMPLETE Column stats: NONE
                           Select Operator
-                            expressions: _col0 (type: string), UDFToString(_col5) (type: string)
+                            expressions: _col0 (type: string), CAST( _col5 AS STRING) (type: string)
                             outputColumnNames: _col0, _col1
                             Statistics: Num rows: 20 Data size: 3680 Basic stats: COMPLETE Column stats: NONE
                             File Output Operator
@@ -482,7 +482,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 1840 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 1840 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -531,7 +531,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 1840 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 1840 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -767,7 +767,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 3680 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 3680 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -821,7 +821,7 @@ STAGE PLANS:
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                      expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
@@ -855,7 +855,7 @@ STAGE PLANS:
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                      expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
@@ -1184,7 +1184,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 3680 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), UDFToString(_col1) (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 3680 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -1233,7 +1233,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 3680 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), UDFToString(_col1) (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 3680 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -1282,7 +1282,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 5 Data size: 1840 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 5 Data size: 1840 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -1632,7 +1632,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 3680 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 3680 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -1681,7 +1681,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 10 Data size: 3680 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                  expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 10 Data size: 3680 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
@@ -1735,7 +1735,7 @@ STAGE PLANS:
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                      expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
@@ -1769,7 +1769,7 @@ STAGE PLANS:
                     outputColumnNames: _col0, _col1
                     Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: _col0 (type: string), UDFToString(_col1) (type: string)
+                      expressions: _col0 (type: string), CAST( _col1 AS STRING) (type: string)
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/orc_merge1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/orc_merge1.q.out b/ql/src/test/results/clientpositive/llap/orc_merge1.q.out
index 7189964..4eb6617 100644
--- a/ql/src/test/results/clientpositive/llap/orc_merge1.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_merge1.q.out
@@ -82,7 +82,7 @@ STAGE PLANS:
                           serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                           name: default.orcfile_merge1_n1
                     Select Operator
-                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string)
+                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string)
                       outputColumnNames: key, value, ds, part
                       Statistics: Num rows: 500 Data size: 182000 Basic stats: COMPLETE Column stats: COMPLETE
                       Group By Operator
@@ -215,7 +215,7 @@ STAGE PLANS:
                           serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                           name: default.orcfile_merge1b_n1
                     Select Operator
-                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string)
+                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string)
                       outputColumnNames: key, value, ds, part
                       Statistics: Num rows: 500 Data size: 182000 Basic stats: COMPLETE Column stats: COMPLETE
                       Group By Operator
@@ -388,7 +388,7 @@ STAGE PLANS:
                           serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                           name: default.orcfile_merge1c_n1
                     Select Operator
-                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string)
+                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string)
                       outputColumnNames: key, value, ds, part
                       Statistics: Num rows: 500 Data size: 182000 Basic stats: COMPLETE Column stats: COMPLETE
                       Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/orc_merge10.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/orc_merge10.q.out b/ql/src/test/results/clientpositive/llap/orc_merge10.q.out
index 0f9bb2d..699cbde 100644
--- a/ql/src/test/results/clientpositive/llap/orc_merge10.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_merge10.q.out
@@ -82,7 +82,7 @@ STAGE PLANS:
                           serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                           name: default.orcfile_merge1
                     Select Operator
-                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string)
+                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string)
                       outputColumnNames: key, value, ds, part
                       Statistics: Num rows: 500 Data size: 182000 Basic stats: COMPLETE Column stats: COMPLETE
                       Group By Operator
@@ -212,7 +212,7 @@ STAGE PLANS:
                           serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                           name: default.orcfile_merge1b
                     Select Operator
-                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string)
+                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string)
                       outputColumnNames: key, value, ds, part
                       Statistics: Num rows: 500 Data size: 182000 Basic stats: COMPLETE Column stats: COMPLETE
                       Group By Operator
@@ -385,7 +385,7 @@ STAGE PLANS:
                           serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                           name: default.orcfile_merge1c
                     Select Operator
-                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string)
+                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string)
                       outputColumnNames: key, value, ds, part
                       Statistics: Num rows: 500 Data size: 182000 Basic stats: COMPLETE Column stats: COMPLETE
                       Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/orc_merge2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/orc_merge2.q.out b/ql/src/test/results/clientpositive/llap/orc_merge2.q.out
index b6e4dd6..ccc49c9 100644
--- a/ql/src/test/results/clientpositive/llap/orc_merge2.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_merge2.q.out
@@ -56,7 +56,7 @@ STAGE PLANS:
                           serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                           name: default.orcfile_merge2a_n0
                     Select Operator
-                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string), UDFToString(_col3) (type: string)
+                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string), CAST( _col3 AS STRING) (type: string)
                       outputColumnNames: key, value, one, two, three
                       Statistics: Num rows: 500 Data size: 274000 Basic stats: COMPLETE Column stats: COMPLETE
                       Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/orc_merge_diff_fs.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/orc_merge_diff_fs.q.out b/ql/src/test/results/clientpositive/llap/orc_merge_diff_fs.q.out
index 751bb40..1cc22c4 100644
--- a/ql/src/test/results/clientpositive/llap/orc_merge_diff_fs.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_merge_diff_fs.q.out
@@ -82,7 +82,7 @@ STAGE PLANS:
                           serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                           name: default.orcfile_merge1_n0
                     Select Operator
-                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string)
+                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string)
                       outputColumnNames: key, value, ds, part
                       Statistics: Num rows: 500 Data size: 182000 Basic stats: COMPLETE Column stats: COMPLETE
                       Group By Operator
@@ -210,7 +210,7 @@ STAGE PLANS:
                           serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                           name: default.orcfile_merge1b_n0
                     Select Operator
-                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string)
+                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string)
                       outputColumnNames: key, value, ds, part
                       Statistics: Num rows: 500 Data size: 182000 Basic stats: COMPLETE Column stats: COMPLETE
                       Group By Operator
@@ -383,7 +383,7 @@ STAGE PLANS:
                           serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                           name: default.orcfile_merge1c_n0
                     Select Operator
-                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string)
+                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string)
                       outputColumnNames: key, value, ds, part
                       Statistics: Num rows: 500 Data size: 182000 Basic stats: COMPLETE Column stats: COMPLETE
                       Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/rcfile_merge2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/rcfile_merge2.q.out b/ql/src/test/results/clientpositive/llap/rcfile_merge2.q.out
index afdc8f8..51a16b8 100644
--- a/ql/src/test/results/clientpositive/llap/rcfile_merge2.q.out
+++ b/ql/src/test/results/clientpositive/llap/rcfile_merge2.q.out
@@ -56,7 +56,7 @@ STAGE PLANS:
                           serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
                           name: default.rcfile_merge2a
                     Select Operator
-                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), UDFToString(_col2) (type: string), UDFToString(_col3) (type: string)
+                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), CAST( _col2 AS STRING) (type: string), CAST( _col3 AS STRING) (type: string)
                       outputColumnNames: key, value, one, two, three
                       Statistics: Num rows: 500 Data size: 274000 Basic stats: COMPLETE Column stats: COMPLETE
                       Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part.q.out
index ca2c908..055824a 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part.q.out
@@ -941,8 +941,8 @@ insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	b
 101	1	99999999999999999999.999999999999999999	Infinity	1.7976931348623157E308	99999999999999999999.999999999999999999           	Infinity                                          	1.7976931348623157E308                            	9999999	Infinit	1.79769	99999999999999999999.999999999999999999	Infinity	1.7976931348623157E308	9999999	Infinit	1.79769	original
 102	1	-99999999999999999999.999999999999999999	-Infinity	-1.7976931348623157E308	-99999999999999999999.999999999999999999          	-Infinity                                         	-1.7976931348623157E308                           	-999999	-Infini	-1.7976	-99999999999999999999.999999999999999999	-Infinity	-1.7976931348623157E308	-999999	-Infini	-1.7976	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	66475.561431	-100.35978	30.774	66475.561431                                      	-100.35978                                        	30.774                                            	66475.5	-100.35	30.774 	66475.561431	-100.35978	30.774	66475.5	-100.35	30.774	original
-105	1	9250340.75	NULL	46114.28	9250340.75                                        	NULL	46114.28                                          	9250340	NULL	46114.2	9250340.75	NULL	46114.28	9250340	NULL	46114.2	original
+104	1	66475.561431000000000000	-100.35978	30.774	66475.561431000000000000                          	-100.35978                                        	30.774                                            	66475.5	-100.35	30.774 	66475.561431000000000000	-100.35978	30.774	66475.5	-100.35	30.774	original
+105	1	9250340.750000000000000000	NULL	46114.28	9250340.750000000000000000                        	NULL	46114.28                                          	9250340	NULL	46114.2	9250340.750000000000000000	NULL	46114.28	9250340	NULL	46114.2	original
 111	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 PREHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group_n8
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_complex.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_complex.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_complex.q.out
index 4cb2ee8..2b0135d 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_complex.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_complex.q.out
@@ -188,12 +188,12 @@ POSTHOOK: Input: default@part_change_various_various_struct1_n3@part=1
 POSTHOOK: Input: default@part_change_various_various_struct1_n3@part=2
 #### A masked pattern was here ####
 insert_num	part	s1	b
-1	1	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"n)گ"}	original
-2	1	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"n)گ"}	original
-3	1	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"n)گ"}	original
-4	1	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"n)گ"}	original
+1	1	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135.000000000000000000","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"n)گ"}	original
+2	1	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488000000000","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"n)گ"}	original
+3	1	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431000000000000","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"n)گ"}	original
+4	1	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.750000000000000000","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"n)گ"}	original
 5	2	{"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"}	new
-6	1	{"c1":"FALSE","c2":"-67","c3":"833","c4":"63993","c5":"1255178165","c6":"905071.0","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":null,"c12":"2016-03-07","c13":"n)گ"}	new
+6	1	{"c1":"FALSE","c2":"-67","c3":"833","c4":"63993","c5":"1255178165","c6":"905071.0","c7":"-4314.7918","c8":"-1240033819.000000000000000000","c9":"trial","c10":"trial","c11":null,"c12":"2016-03-07","c13":"n)گ"}	new
 PREHOOK: query: drop table part_change_various_various_struct1_n3
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@part_change_various_various_struct1_n3
@@ -470,9 +470,9 @@ insert_num	part	b	s2
 2	1	original	NULL
 3	1	new	NULL
 4	1	new	NULL
-5	2	new	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"n)گ"}
-6	2	new	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"n)گ"}
-7	2	new	{"c1":"TRUE","c2":null,"c3":null,"c4":"-100","c5":"953967041","c6":"62.07915","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":null,"c12":null,"c13":"n)گ"}
+5	2	new	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431000000000000","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"n)گ"}
+6	2	new	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.750000000000000000","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"n)گ"}
+7	2	new	{"c1":"TRUE","c2":null,"c3":null,"c4":"-100","c5":"953967041","c6":"62.07915","c7":"718.78","c8":"1.000000000000000000","c9":"verdict","c10":"verdict","c11":null,"c12":null,"c13":"n)گ"}
 8	1	new	NULL
 PREHOOK: query: drop table part_add_various_various_struct2_n3
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_complex_llap_io.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_complex_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_complex_llap_io.q.out
index 9248712..aad6b57 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_complex_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_complex_llap_io.q.out
@@ -189,12 +189,12 @@ POSTHOOK: Input: default@part_change_various_various_struct1_n1@part=1
 POSTHOOK: Input: default@part_change_various_various_struct1_n1@part=2
 #### A masked pattern was here ####
 insert_num	part	s1	b
-1	1	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"n)گ"}	original
-2	1	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"n)گ"}	original
-3	1	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"n)گ"}	original
-4	1	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"n)گ"}	original
+1	1	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135.000000000000000000","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"n)گ"}	original
+2	1	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488000000000","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"n)گ"}	original
+3	1	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431000000000000","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"n)گ"}	original
+4	1	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.750000000000000000","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"n)گ"}	original
 5	2	{"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"}	new
-6	1	{"c1":"FALSE","c2":"-67","c3":"833","c4":"63993","c5":"1255178165","c6":"905071.0","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":null,"c12":"2016-03-07","c13":"n)گ"}	new
+6	1	{"c1":"FALSE","c2":"-67","c3":"833","c4":"63993","c5":"1255178165","c6":"905071.0","c7":"-4314.7918","c8":"-1240033819.000000000000000000","c9":"trial","c10":"trial","c11":null,"c12":"2016-03-07","c13":"n)گ"}	new
 PREHOOK: query: drop table part_change_various_various_struct1_n1
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@part_change_various_various_struct1_n1
@@ -472,9 +472,9 @@ insert_num	part	b	s2
 2	1	original	NULL
 3	1	new	NULL
 4	1	new	NULL
-5	2	new	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"n)گ"}
-6	2	new	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"n)گ"}
-7	2	new	{"c1":"TRUE","c2":null,"c3":null,"c4":"-100","c5":"953967041","c6":"62.07915","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":null,"c12":null,"c13":"n)گ"}
+5	2	new	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431000000000000","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"n)گ"}
+6	2	new	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.750000000000000000","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"n)گ"}
+7	2	new	{"c1":"TRUE","c2":null,"c3":null,"c4":"-100","c5":"953967041","c6":"62.07915","c7":"718.78","c8":"1.000000000000000000","c9":"verdict","c10":"verdict","c11":null,"c12":null,"c13":"n)گ"}
 8	1	new	NULL
 PREHOOK: query: drop table part_add_various_various_struct2_n1
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_llap_io.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_llap_io.q.out
index fcbd8e8..7331225 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_llap_io.q.out
@@ -947,8 +947,8 @@ insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	b
 101	1	99999999999999999999.999999999999999999	Infinity	1.7976931348623157E308	99999999999999999999.999999999999999999           	Infinity                                          	1.7976931348623157E308                            	9999999	Infinit	1.79769	99999999999999999999.999999999999999999	Infinity	1.7976931348623157E308	9999999	Infinit	1.79769	original
 102	1	-99999999999999999999.999999999999999999	-Infinity	-1.7976931348623157E308	-99999999999999999999.999999999999999999          	-Infinity                                         	-1.7976931348623157E308                           	-999999	-Infini	-1.7976	-99999999999999999999.999999999999999999	-Infinity	-1.7976931348623157E308	-999999	-Infini	-1.7976	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	66475.561431	-100.35978	30.774	66475.561431                                      	-100.35978                                        	30.774                                            	66475.5	-100.35	30.774 	66475.561431	-100.35978	30.774	66475.5	-100.35	30.774	original
-105	1	9250340.75	NULL	46114.28	9250340.75                                        	NULL	46114.28                                          	9250340	NULL	46114.2	9250340.75	NULL	46114.28	9250340	NULL	46114.2	original
+104	1	66475.561431000000000000	-100.35978	30.774	66475.561431000000000000                          	-100.35978                                        	30.774                                            	66475.5	-100.35	30.774 	66475.561431000000000000	-100.35978	30.774	66475.5	-100.35	30.774	original
+105	1	9250340.750000000000000000	NULL	46114.28	9250340.750000000000000000                        	NULL	46114.28                                          	9250340	NULL	46114.2	9250340.750000000000000000	NULL	46114.28	9250340	NULL	46114.2	original
 111	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 PREHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group_n1
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part.q.out
index fed044e..a0d81b6 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part.q.out
@@ -1097,8 +1097,8 @@ insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	b
 101	1	99999999999999999999.999999999999999999	Infinity	1.7976931348623157E308	99999999999999999999.999999999999999999           	Infinity                                          	1.7976931348623157E308                            	9999999	Infinit	1.79769	99999999999999999999.999999999999999999	Infinity	1.7976931348623157E308	9999999	Infinit	1.79769	original
 102	1	-99999999999999999999.999999999999999999	-Infinity	-1.7976931348623157E308	-99999999999999999999.999999999999999999          	-Infinity                                         	-1.7976931348623157E308                           	-999999	-Infini	-1.7976	-99999999999999999999.999999999999999999	-Infinity	-1.7976931348623157E308	-999999	-Infini	-1.7976	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	66475.561431	-100.35978	30.774	66475.561431                                      	-100.35978                                        	30.774                                            	66475.5	-100.35	30.774 	66475.561431	-100.35978	30.774	66475.5	-100.35	30.774	original
-105	1	9250340.75	NULL	46114.28	9250340.75                                        	NULL	46114.28                                          	9250340	NULL	46114.2	9250340.75	NULL	46114.28	9250340	NULL	46114.2	original
+104	1	66475.561431000000000000	-100.35978	30.774	66475.561431000000000000                          	-100.35978                                        	30.774                                            	66475.5	-100.35	30.774 	66475.561431000000000000	-100.35978	30.774	66475.5	-100.35	30.774	original
+105	1	9250340.750000000000000000	NULL	46114.28	9250340.750000000000000000                        	NULL	46114.28                                          	9250340	NULL	46114.2	9250340.750000000000000000	NULL	46114.28	9250340	NULL	46114.2	original
 111	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 PREHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group_n10
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex.q.out
index 5d6b08b..f31c200 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_complex.q.out
@@ -214,12 +214,12 @@ POSTHOOK: Input: default@part_change_various_various_struct1_n6@part=1
 POSTHOOK: Input: default@part_change_various_various_struct1_n6@part=2
 #### A masked pattern was here ####
 insert_num	part	s1	b
-1	1	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"n)گ"}	original
-2	1	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"n)گ"}	original
-3	1	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"n)گ"}	original
-4	1	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"n)گ"}	original
+1	1	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135.000000000000000000","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"n)گ"}	original
+2	1	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488000000000","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"n)گ"}	original
+3	1	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431000000000000","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"n)گ"}	original
+4	1	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.750000000000000000","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"n)گ"}	original
 5	2	{"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"}	new
-6	1	{"c1":"FALSE","c2":"-67","c3":"833","c4":"63993","c5":"1255178165","c6":"905071.0","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":null,"c12":"2016-03-07","c13":"n)گ"}	new
+6	1	{"c1":"FALSE","c2":"-67","c3":"833","c4":"63993","c5":"1255178165","c6":"905071.0","c7":"-4314.7918","c8":"-1240033819.000000000000000000","c9":"trial","c10":"trial","c11":null,"c12":"2016-03-07","c13":"n)گ"}	new
 PREHOOK: query: drop table part_change_various_various_struct1_n6
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@part_change_various_various_struct1_n6
@@ -522,9 +522,9 @@ insert_num	part	b	s2
 2	1	original	NULL
 3	1	new	NULL
 4	1	new	NULL
-5	2	new	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"n)گ"}
-6	2	new	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"n)گ"}
-7	2	new	{"c1":"TRUE","c2":null,"c3":null,"c4":"-100","c5":"953967041","c6":"62.07915","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":null,"c12":null,"c13":"n)گ"}
+5	2	new	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431000000000000","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"n)گ"}
+6	2	new	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.750000000000000000","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"n)گ"}
+7	2	new	{"c1":"TRUE","c2":null,"c3":null,"c4":"-100","c5":"953967041","c6":"62.07915","c7":"718.78","c8":"1.000000000000000000","c9":"verdict","c10":"verdict","c11":null,"c12":null,"c13":"n)گ"}
 8	1	new	NULL
 PREHOOK: query: drop table part_add_various_various_struct2_n6
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part.q.out
index d71107d..a9db624 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part.q.out
@@ -1097,8 +1097,8 @@ insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	b
 101	1	99999999999999999999.999999999999999999	Infinity	1.7976931348623157E308	99999999999999999999.999999999999999999           	Infinity                                          	1.7976931348623157E308                            	9999999	Infinit	1.79769	99999999999999999999.999999999999999999	Infinity	1.7976931348623157E308	9999999	Infinit	1.79769	original
 102	1	-99999999999999999999.999999999999999999	-Infinity	-1.7976931348623157E308	-99999999999999999999.999999999999999999          	-Infinity                                         	-1.7976931348623157E308                           	-999999	-Infini	-1.7976	-99999999999999999999.999999999999999999	-Infinity	-1.7976931348623157E308	-999999	-Infini	-1.7976	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	66475.561431	-100.35978	30.774	66475.561431                                      	-100.35978                                        	30.774                                            	66475.5	-100.35	30.774 	66475.561431	-100.35978	30.774	66475.5	-100.35	30.774	original
-105	1	9250340.75	NULL	46114.28	9250340.75                                        	NULL	46114.28                                          	9250340	NULL	46114.2	9250340.75	NULL	46114.28	9250340	NULL	46114.2	original
+104	1	66475.561431000000000000	-100.35978	30.774	66475.561431000000000000                          	-100.35978                                        	30.774                                            	66475.5	-100.35	30.774 	66475.561431000000000000	-100.35978	30.774	66475.5	-100.35	30.774	original
+105	1	9250340.750000000000000000	NULL	46114.28	9250340.750000000000000000                        	NULL	46114.28                                          	9250340	NULL	46114.2	9250340.750000000000000000	NULL	46114.28	9250340	NULL	46114.2	original
 111	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 PREHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group_n11
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part_all_complex.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part_all_complex.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part_all_complex.q.out
index 5208183..674da22 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part_all_complex.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part_all_complex.q.out
@@ -214,12 +214,12 @@ POSTHOOK: Input: default@part_change_various_various_struct1_n4@part=1
 POSTHOOK: Input: default@part_change_various_various_struct1_n4@part=2
 #### A masked pattern was here ####
 insert_num	part	s1	b
-1	1	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"n)گ"}	original
-2	1	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"n)گ"}	original
-3	1	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"n)گ"}	original
-4	1	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"n)گ"}	original
+1	1	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135.000000000000000000","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"n)گ"}	original
+2	1	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488000000000","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"n)گ"}	original
+3	1	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431000000000000","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"n)گ"}	original
+4	1	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.750000000000000000","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"n)گ"}	original
 5	2	{"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"}	new
-6	1	{"c1":"FALSE","c2":"-67","c3":"833","c4":"63993","c5":"1255178165","c6":"905071.0","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":null,"c12":"2016-03-07","c13":"n)گ"}	new
+6	1	{"c1":"FALSE","c2":"-67","c3":"833","c4":"63993","c5":"1255178165","c6":"905071.0","c7":"-4314.7918","c8":"-1240033819.000000000000000000","c9":"trial","c10":"trial","c11":null,"c12":"2016-03-07","c13":"n)گ"}	new
 PREHOOK: query: drop table part_change_various_various_struct1_n4
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@part_change_various_various_struct1_n4
@@ -522,9 +522,9 @@ insert_num	part	b	s2
 2	1	original	NULL
 3	1	new	NULL
 4	1	new	NULL
-5	2	new	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"n)گ"}
-6	2	new	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"n)گ"}
-7	2	new	{"c1":"TRUE","c2":null,"c3":null,"c4":"-100","c5":"953967041","c6":"62.07915","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":null,"c12":null,"c13":"n)گ"}
+5	2	new	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431000000000000","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"n)گ"}
+6	2	new	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.750000000000000000","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"n)گ"}
+7	2	new	{"c1":"TRUE","c2":null,"c3":null,"c4":"-100","c5":"953967041","c6":"62.07915","c7":"718.78","c8":"1.000000000000000000","c9":"verdict","c10":"verdict","c11":null,"c12":null,"c13":"n)گ"}
 8	1	new	NULL
 PREHOOK: query: drop table part_add_various_various_struct2_n4
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/stats11.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/stats11.q.out b/ql/src/test/results/clientpositive/llap/stats11.q.out
index 8b59f72..5b8e18c 100644
--- a/ql/src/test/results/clientpositive/llap/stats11.q.out
+++ b/ql/src/test/results/clientpositive/llap/stats11.q.out
@@ -490,7 +490,7 @@ STAGE PLANS:
                 Position of Big Table: 1
                 Statistics: Num rows: 156 Data size: 89111 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 156 Data size: 89111 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator
@@ -924,7 +924,7 @@ STAGE PLANS:
                 Position of Big Table: 1
                 Statistics: Num rows: 156 Data size: 89111 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 156 Data size: 89111 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/26f4d8ea/ql/src/test/results/clientpositive/llap/subquery_multi.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/subquery_multi.q.out b/ql/src/test/results/clientpositive/llap/subquery_multi.q.out
index fb9cfd2..89883fd 100644
--- a/ql/src/test/results/clientpositive/llap/subquery_multi.q.out
+++ b/ql/src/test/results/clientpositive/llap/subquery_multi.q.out
@@ -1026,7 +1026,7 @@ STAGE PLANS:
                      Left Outer Join 0 to 1
                 keys:
                   0 _col1 (type: string)
-                  1 UDFToString(_col0) (type: string)
+                  1 CAST( _col0 AS STRING) (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col12
                 Statistics: Num rows: 1 Data size: 1345 Basic stats: COMPLETE Column stats: NONE
                 Filter Operator
@@ -1085,9 +1085,9 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 86 Basic stats: COMPLETE Column stats: NONE
                   Reduce Output Operator
-                    key expressions: UDFToString(_col0) (type: string)
+                    key expressions: CAST( _col0 AS STRING) (type: string)
                     sort order: +
-                    Map-reduce partition columns: UDFToString(_col0) (type: string)
+                    Map-reduce partition columns: CAST( _col0 AS STRING) (type: string)
                     Statistics: Num rows: 1 Data size: 86 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col1 (type: boolean)