Posted to commits@hive.apache.org by jc...@apache.org on 2020/07/30 14:10:12 UTC

[hive] branch master updated: HIVE-23892: Remove interpretation for character RexLiteral (Jesus Camacho Rodriguez, reviewed by Zoltan Haindrich)

This is an automated email from the ASF dual-hosted git repository.

jcamacho pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 9a77675  HIVE-23892: Remove interpretation for character RexLiteral (Jesus Camacho Rodriguez, reviewed by Zoltan Haindrich)
9a77675 is described below

commit 9a77675d35c35b6595b407b4925c80b8d964e6ff
Author: Jesús Camacho Rodríguez <jc...@apache.org>
AuthorDate: Thu Jul 30 07:09:56 2020 -0700

    HIVE-23892: Remove interpretation for character RexLiteral (Jesus Camacho Rodriguez, reviewed by Zoltan Haindrich)
    
    Closes apache/hive#1305
---
 .../hive/ql/optimizer/calcite/HiveCalciteUtil.java |  8 ----
 .../calcite/translator/ExprNodeConverter.java      | 55 ++++++++++------------
 .../calcite/translator/RexNodeConverter.java       | 27 ++++++++---
 .../calcite/translator/TypeConverter.java          | 26 ----------
 .../hive/ql/parse/type/RexNodeExprFactory.java     | 38 ++++++---------
 ql/src/test/queries/clientpositive/vector_const.q  |  4 ++
 .../clientpositive/llap/subquery_notexists.q.out   |  2 +-
 .../clientpositive/llap/union_assertion_type.q.out |  8 ++--
 .../results/clientpositive/llap/vector_const.q.out | 55 +++++++++++++++++++++-
 .../clientpositive/perf/tez/cbo_query5.q.out       |  6 +--
 .../clientpositive/perf/tez/cbo_query80.q.out      |  6 +--
 .../clientpositive/perf/tez/cbo_query84.q.out      |  2 +-
 .../clientpositive/perf/tez/cbo_query91.q.out      |  2 +-
 .../perf/tez/constraints/cbo_query5.q.out          |  6 +--
 .../perf/tez/constraints/cbo_query80.q.out         |  6 +--
 .../perf/tez/constraints/cbo_query84.q.out         |  2 +-
 .../perf/tez/constraints/cbo_query91.q.out         |  2 +-
 17 files changed, 139 insertions(+), 116 deletions(-)
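
Before this change, Hive encoded whether a character literal should be read back as STRING, VARCHAR, or CHAR inside the literal value itself, via the HiveNlsString subclass and its Interpretation enum. After this change, the Calcite type carries that information directly: CHAR and VARCHAR literals get their natural Calcite types, and Hive's unbounded STRING is modeled as VARCHAR(Integer.MAX_VALUE). A minimal standalone sketch of the new literal construction, using the same Calcite calls as the patch below (the class name and the RexBuilder setup are assumptions for illustration):

    import java.nio.charset.Charset;
    import org.apache.calcite.rel.type.RelDataType;
    import org.apache.calcite.rel.type.RelDataTypeSystem;
    import org.apache.calcite.rex.RexBuilder;
    import org.apache.calcite.rex.RexNode;
    import org.apache.calcite.sql.SqlCollation;
    import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
    import org.apache.calcite.sql.type.SqlTypeName;
    import org.apache.calcite.util.ConversionUtil;
    import org.apache.calcite.util.NlsString;

    public class HiveStringLiteralSketch {
      public static void main(String[] args) {
        RexBuilder rexBuilder = new RexBuilder(new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT));
        // Hive STRING maps to a Calcite VARCHAR with unlimited precision and UTF-16LE charset.
        RelDataType stringType = rexBuilder.getTypeFactory().createTypeWithCharsetAndCollation(
            rexBuilder.getTypeFactory().createSqlType(SqlTypeName.VARCHAR, Integer.MAX_VALUE),
            Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
        NlsString value =
            new NlsString("hello", ConversionUtil.NATIVE_UTF16_CHARSET_NAME, SqlCollation.IMPLICIT);
        RexNode literal = rexBuilder.makeLiteral(value, stringType, true);
        // Renders like the updated q.out files below, e.g.
        // _UTF-16LE'hello':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"
        System.out.println(literal);
      }
    }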

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveCalciteUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveCalciteUtil.java
index 636c3a2..88aaedd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveCalciteUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveCalciteUtil.java
@@ -1226,16 +1226,8 @@ public class HiveCalciteUtil {
       // This is a validation check which can become quite handy debugging type
       // issues. Basically, we need both types to be equal, only difference should
       // be nullability.
-      // However, we make an exception for Hive wrt CHAR type because Hive encodes
-      // the STRING type for literals within CHAR value (see {@link HiveNlsString})
-      // while Calcite always considers these literals to be a CHAR, which means
-      // that the reference may be created as a STRING or VARCHAR from AST node
-      // at parsing time but the actual type referenced is a CHAR.
       if (refType2 == rightType) {
         return new RexInputRef(ref.getIndex(), refType2);
-      } else if (refType2.getFamily() == SqlTypeFamily.CHARACTER &&
-          rightType.getSqlTypeName() == SqlTypeName.CHAR && !rightType.isNullable()) {
-        return new RexInputRef(ref.getIndex(), rightType);
       }
       throw new AssertionError("mismatched type " + ref + " " + rightType);
     }
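
With the CHAR exception gone, the validation above relies on the type factory's canonicalization: the reference type is normalized to the other side's nullability and then compared by object identity. A small sketch of why the == comparison is sound, assuming a plain Calcite SqlTypeFactoryImpl (the class name is illustrative; the factory interns types, so equal types are the same instance):

    import org.apache.calcite.rel.type.RelDataType;
    import org.apache.calcite.rel.type.RelDataTypeFactory;
    import org.apache.calcite.rel.type.RelDataTypeSystem;
    import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
    import org.apache.calcite.sql.type.SqlTypeName;

    public class NullabilityOnlyCheckSketch {
      public static void main(String[] args) {
        RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
        RelDataType charNotNull = typeFactory.createSqlType(SqlTypeName.CHAR, 3); // NOT NULL by default
        RelDataType charNullable = typeFactory.createTypeWithNullability(charNotNull, true);
        // Align nullability, then compare by identity, as HiveCalciteUtil does:
        RelDataType aligned = typeFactory.createTypeWithNullability(charNullable, charNotNull.isNullable());
        System.out.println(aligned == charNotNull); // true: the factory canonicalizes types
      }
    }
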
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
index 5587e99..3ef5869 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.ql.optimizer.calcite.translator;
 
+import com.google.common.base.Preconditions;
 import java.math.BigDecimal;
 import java.util.ArrayList;
 import java.util.LinkedList;
@@ -42,6 +43,7 @@ import org.apache.calcite.rex.RexWindowBound;
 import org.apache.calcite.sql.SqlKind;
 import org.apache.calcite.sql.type.SqlTypeUtil;
 import org.apache.calcite.util.DateString;
+import org.apache.calcite.util.NlsString;
 import org.apache.calcite.util.TimeString;
 import org.apache.calcite.util.TimestampString;
 import org.apache.hadoop.hive.common.type.Date;
@@ -73,7 +75,6 @@ import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowFrameSpec;
 import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowFunctionSpec;
 import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowSpec;
 import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowType;
-import org.apache.hadoop.hive.ql.parse.type.RexNodeExprFactory.HiveNlsString;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -294,27 +295,27 @@ public class ExprNodeConverter extends RexVisitorImpl<ExprNodeDesc> {
     } else {
       switch (literal.getType().getSqlTypeName()) {
       case BOOLEAN:
-        return new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, Boolean.valueOf(RexLiteral
-            .booleanValue(literal)));
+        return new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo,
+            RexLiteral.booleanValue(literal));
       case TINYINT:
-        return new ExprNodeConstantDesc(TypeInfoFactory.byteTypeInfo, Byte.valueOf(((Number) literal
-            .getValue3()).byteValue()));
+        return new ExprNodeConstantDesc(TypeInfoFactory.byteTypeInfo,
+            ((Number) literal.getValue3()).byteValue());
       case SMALLINT:
         return new ExprNodeConstantDesc(TypeInfoFactory.shortTypeInfo,
-            Short.valueOf(((Number) literal.getValue3()).shortValue()));
+            ((Number) literal.getValue3()).shortValue());
       case INTEGER:
         return new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo,
-            Integer.valueOf(((Number) literal.getValue3()).intValue()));
+            ((Number) literal.getValue3()).intValue());
       case BIGINT:
-        return new ExprNodeConstantDesc(TypeInfoFactory.longTypeInfo, Long.valueOf(((Number) literal
-            .getValue3()).longValue()));
+        return new ExprNodeConstantDesc(TypeInfoFactory.longTypeInfo,
+            ((Number) literal.getValue3()).longValue());
       case FLOAT:
       case REAL:
         return new ExprNodeConstantDesc(TypeInfoFactory.floatTypeInfo,
-            Float.valueOf(((Number) literal.getValue3()).floatValue()));
+            ((Number) literal.getValue3()).floatValue());
       case DOUBLE:
         return new ExprNodeConstantDesc(TypeInfoFactory.doubleTypeInfo,
-            Double.valueOf(((Number) literal.getValue3()).doubleValue()));
+            ((Number) literal.getValue3()).doubleValue());
       case DATE:
         return new ExprNodeConstantDesc(TypeInfoFactory.dateTypeInfo,
             Date.valueOf(literal.getValueAs(DateString.class).toString()));
@@ -341,26 +342,22 @@ public class ExprNodeConverter extends RexVisitorImpl<ExprNodeDesc> {
       case DECIMAL:
         return new ExprNodeConstantDesc(TypeInfoFactory.getDecimalTypeInfo(lType.getPrecision(),
             lType.getScale()), HiveDecimal.create((BigDecimal)literal.getValue3()));
-      case VARCHAR:
       case CHAR: {
-        if (literal.getValue() instanceof HiveNlsString) {
-          HiveNlsString mxNlsString = (HiveNlsString) literal.getValue();
-          switch (mxNlsString.interpretation) {
-          case STRING:
-            return new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, literal.getValue3());
-          case CHAR: {
-            int precision = lType.getPrecision();
-            HiveChar value = new HiveChar((String) literal.getValue3(), precision);
-            return new ExprNodeConstantDesc(new CharTypeInfo(precision), value);
-          }
-          case VARCHAR: {
-            int precision = lType.getPrecision();
-            HiveVarchar value = new HiveVarchar((String) literal.getValue3(), precision);
-            return new ExprNodeConstantDesc(new VarcharTypeInfo(precision), value);
-          }
-          }
+        Preconditions.checkState(literal.getValue() instanceof NlsString,
+            "char values must use NlsString for correctness");
+        int precision = lType.getPrecision();
+        HiveChar value = new HiveChar((String) literal.getValue3(), precision);
+        return new ExprNodeConstantDesc(new CharTypeInfo(precision), value);
+      }
+      case VARCHAR: {
+        Preconditions.checkState(literal.getValue() instanceof NlsString,
+            "varchar/string values must use NlsString for correctness");
+        int precision = lType.getPrecision();
+        if (precision == Integer.MAX_VALUE) {
+          return new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, literal.getValue3());
         }
-        throw new RuntimeException("varchar/string/char values must use HiveNlsString for correctness");
+        HiveVarchar value = new HiveVarchar((String) literal.getValue3(), precision);
+        return new ExprNodeConstantDesc(new VarcharTypeInfo(precision), value);
       }
       case INTERVAL_YEAR:
       case INTERVAL_MONTH:
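
With the Interpretation enum removed, ExprNodeConverter recovers the Hive type purely from the literal's Calcite type, as the hunk above shows. The decoding rule can be summarized in a hypothetical helper (the names hiveTypeOf and CharacterTypeDecodingSketch are illustrative, not part of the patch):

    import org.apache.calcite.sql.type.SqlTypeName;
    import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
    import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;

    public class CharacterTypeDecodingSketch {
      /** Hypothetical helper mirroring the CHAR/VARCHAR cases above. */
      static TypeInfo hiveTypeOf(SqlTypeName typeName, int precision) {
        switch (typeName) {
        case CHAR:
          return new CharTypeInfo(precision);
        case VARCHAR:
          // VARCHAR with unlimited precision stands in for Hive's STRING.
          return precision == Integer.MAX_VALUE
              ? TypeInfoFactory.stringTypeInfo
              : new VarcharTypeInfo(precision);
        default:
          throw new IllegalArgumentException("not a character type: " + typeName);
        }
      }
    }
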
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
index 99dfbfc..ed7eb0e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
@@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableList.Builder;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 
+import java.nio.charset.Charset;
 import org.apache.calcite.avatica.util.TimeUnit;
 import org.apache.calcite.avatica.util.TimeUnitRange;
 import org.apache.calcite.rel.type.RelDataType;
@@ -31,6 +32,7 @@ import org.apache.calcite.rex.RexCall;
 import org.apache.calcite.rex.RexNode;
 import org.apache.calcite.rex.RexUtil;
 import org.apache.calcite.sql.SqlBinaryOperator;
+import org.apache.calcite.sql.SqlCollation;
 import org.apache.calcite.sql.SqlIntervalQualifier;
 import org.apache.calcite.sql.SqlKind;
 import org.apache.calcite.sql.SqlOperator;
@@ -39,6 +41,7 @@ import org.apache.calcite.sql.fun.SqlStdOperatorTable;
 import org.apache.calcite.sql.parser.SqlParserPos;
 import org.apache.calcite.sql.type.SqlTypeName;
 import org.apache.calcite.sql.type.SqlTypeUtil;
+import org.apache.calcite.util.ConversionUtil;
 import org.apache.calcite.util.DateString;
 import org.apache.calcite.util.TimestampString;
 import org.apache.hadoop.hive.common.type.Date;
@@ -60,7 +63,6 @@ import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveToDateSqlOpe
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.parse.type.ExprNodeTypeCheck;
 import org.apache.hadoop.hive.ql.parse.type.RexNodeExprFactory;
-import org.apache.hadoop.hive.ql.parse.type.RexNodeExprFactory.HiveNlsString.Interpretation;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
@@ -718,19 +720,30 @@ public class RexNodeConverter {
       if (value instanceof HiveChar) {
         value = ((HiveChar) value).getValue();
       }
-      calciteLiteral = rexBuilder.makeCharLiteral(
-          RexNodeExprFactory.makeHiveUnicodeString(Interpretation.CHAR, (String) value));
+      final int lengthChar = TypeInfoUtils.getCharacterLengthForType(hiveType);
+      RelDataType charType = rexBuilder.getTypeFactory().createTypeWithCharsetAndCollation(
+          rexBuilder.getTypeFactory().createSqlType(SqlTypeName.CHAR, lengthChar),
+          Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
+      calciteLiteral = rexBuilder.makeLiteral(
+          RexNodeExprFactory.makeHiveUnicodeString((String) value), charType, false);
       break;
     case VARCHAR:
       if (value instanceof HiveVarchar) {
         value = ((HiveVarchar) value).getValue();
       }
-      calciteLiteral = rexBuilder.makeCharLiteral(
-          RexNodeExprFactory.makeHiveUnicodeString(Interpretation.VARCHAR, (String) value));
+      final int lengthVarchar = TypeInfoUtils.getCharacterLengthForType(hiveType);
+      RelDataType varcharType = rexBuilder.getTypeFactory().createTypeWithCharsetAndCollation(
+          rexBuilder.getTypeFactory().createSqlType(SqlTypeName.VARCHAR, lengthVarchar),
+          Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
+      calciteLiteral = rexBuilder.makeLiteral(
+          RexNodeExprFactory.makeHiveUnicodeString((String) value), varcharType, true);
       break;
     case STRING:
-      calciteLiteral = rexBuilder.makeCharLiteral(
-          RexNodeExprFactory.makeHiveUnicodeString(Interpretation.STRING, (String) value));
+      RelDataType stringType = rexBuilder.getTypeFactory().createTypeWithCharsetAndCollation(
+          rexBuilder.getTypeFactory().createSqlType(SqlTypeName.VARCHAR, Integer.MAX_VALUE),
+          Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
+      calciteLiteral = rexBuilder.makeLiteral(
+          RexNodeExprFactory.makeHiveUnicodeString((String) value), stringType, true);
       break;
     case DATE:
       final Date date = (Date) value;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java
index fc019a7..e95ff18 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java
@@ -47,7 +47,6 @@ import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException.Unsu
 import org.apache.hadoop.hive.ql.optimizer.calcite.translator.SqlFunctionConverter.HiveToken;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
-import org.apache.hadoop.hive.ql.parse.type.RexNodeExprFactory.HiveNlsString;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
@@ -274,32 +273,7 @@ public class TypeConverter {
     throw new CalciteSemanticException("Union type is not supported", UnsupportedFeature.Union_type);
   }
 
-  /**
-   * This method exists because type information for CHAR literals
-   * is encoded within the literal value itself. The reason is that
-   * Calcite considers any character literal as CHAR type by default,
-   * while Hive is more flexible and may consider them STRING, VARCHAR,
-   * or CHAR.
-   */
   public static TypeInfo convertLiteralType(RexLiteral literal) {
-    if (literal.getType().getSqlTypeName() == SqlTypeName.CHAR) {
-      HiveNlsString string = (HiveNlsString) RexLiteral.value(literal);
-      if (string == null) {
-        // Original type
-        return TypeConverter.convertPrimitiveType(literal.getType());
-      }
-      // Interpret
-      switch (string.interpretation) {
-        case STRING:
-          return TypeInfoFactory.stringTypeInfo;
-        case VARCHAR:
-          return TypeInfoFactory.getVarcharTypeInfo(
-              literal.getType().getPrecision());
-        case CHAR:
-          return TypeInfoFactory.getCharTypeInfo(
-              literal.getType().getPrecision());
-      }
-    }
     return TypeConverter.convertPrimitiveType(literal.getType());
   }
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/type/RexNodeExprFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/type/RexNodeExprFactory.java
index 0d00713..f68ce3a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/type/RexNodeExprFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/type/RexNodeExprFactory.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql.parse.type;
 
 import com.google.common.collect.ImmutableList;
 import java.math.BigDecimal;
+import java.nio.charset.Charset;
 import java.time.Instant;
 import java.time.ZoneId;
 import java.util.ArrayList;
@@ -70,11 +71,9 @@ import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.parse.type.RexNodeExprFactory.HiveNlsString.Interpretation;
 import org.apache.hadoop.hive.ql.plan.SubqueryType;
 import org.apache.hadoop.hive.ql.udf.SettableUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -320,8 +319,8 @@ public class RexNodeExprFactory extends ExprFactory<RexNode> {
       PrimitiveTypeInfo sourceType) {
     // Extract string value if necessary
     Object constantToInterpret = constantValue;
-    if (constantValue instanceof HiveNlsString) {
-      constantToInterpret = ((HiveNlsString) constantValue).getValue();
+    if (constantValue instanceof NlsString) {
+      constantToInterpret = ((NlsString) constantValue).getValue();
     }
 
     if (constantToInterpret instanceof Number || constantToInterpret instanceof String) {
@@ -374,7 +373,7 @@ public class RexNodeExprFactory extends ExprFactory<RexNode> {
         HiveChar newValue = new HiveChar(constValue, length);
         HiveChar maxCharConst = new HiveChar(constValue, HiveChar.MAX_CHAR_LENGTH);
         if (maxCharConst.equals(newValue)) {
-          return makeHiveUnicodeString(Interpretation.CHAR, newValue.getValue());
+          return makeHiveUnicodeString(newValue.getValue());
         } else {
           return null;
         }
@@ -385,7 +384,7 @@ public class RexNodeExprFactory extends ExprFactory<RexNode> {
         HiveVarchar newValue = new HiveVarchar(constValue, length);
         HiveVarchar maxCharConst = new HiveVarchar(constValue, HiveVarchar.MAX_VARCHAR_LENGTH);
         if (maxCharConst.equals(newValue)) {
-          return makeHiveUnicodeString(Interpretation.VARCHAR, newValue.getValue());
+          return makeHiveUnicodeString(newValue.getValue());
         } else {
           return null;
         }
@@ -407,8 +406,13 @@ public class RexNodeExprFactory extends ExprFactory<RexNode> {
    */
   @Override
   protected RexLiteral createStringConstantExpr(String value) {
-    return rexBuilder.makeCharLiteral(
-        makeHiveUnicodeString(Interpretation.STRING, value));
+    RelDataType stringType = rexBuilder.getTypeFactory().createTypeWithCharsetAndCollation(
+        rexBuilder.getTypeFactory().createSqlType(SqlTypeName.VARCHAR, Integer.MAX_VALUE),
+        Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
+    // Note. Though we pass allowCast=true as parameter, this method will return a
+    // VARCHAR literal without a CAST.
+    return (RexLiteral) rexBuilder.makeLiteral(
+        makeHiveUnicodeString(value), stringType, true);
   }
 
   /**
@@ -989,22 +993,8 @@ public class RexNodeExprFactory extends ExprFactory<RexNode> {
     }
   }
 
-  public static NlsString makeHiveUnicodeString(Interpretation interpretation, String text) {
-    return new HiveNlsString(interpretation, text, ConversionUtil.NATIVE_UTF16_CHARSET_NAME, SqlCollation.IMPLICIT);
-  }
-
-  public static class HiveNlsString extends NlsString {
-
-    public enum Interpretation {
-      CHAR, VARCHAR, STRING;
-    }
-
-    public final Interpretation interpretation;
-
-    public HiveNlsString(Interpretation interpretation, String value, String charsetName, SqlCollation collation) {
-      super(value, charsetName, collation);
-      this.interpretation = interpretation;
-    }
+  public static NlsString makeHiveUnicodeString(String text) {
+    return new NlsString(text, ConversionUtil.NATIVE_UTF16_CHARSET_NAME, SqlCollation.IMPLICIT);
   }
 
 }
diff --git a/ql/src/test/queries/clientpositive/vector_const.q b/ql/src/test/queries/clientpositive/vector_const.q
index 1c7d35c..d801ca6 100644
--- a/ql/src/test/queries/clientpositive/vector_const.q
+++ b/ql/src/test/queries/clientpositive/vector_const.q
@@ -11,3 +11,7 @@ SELECT CONCAT(CAST('F' AS CHAR(2)), CAST('F' AS VARCHAR(2))) FROM VARCHAR_CONST_
 
 SELECT CONCAT(CAST('F' AS CHAR(2)), CAST('F' AS VARCHAR(2))) FROM VARCHAR_CONST_1;
 
+EXPLAIN
+SELECT CONCAT(CAST('F' AS CHAR(200)), CAST('F' AS CHAR(200))) FROM VARCHAR_CONST_1;
+
+SELECT CONCAT(CAST('F' AS CHAR(200)), CAST('F' AS CHAR(200))) FROM VARCHAR_CONST_1;
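
The new CHAR(200) test exercises a case where the folded constant cannot keep a fixed-length character type: the concatenation would need a 400-character CHAR, beyond HiveChar.MAX_CHAR_LENGTH (255), and the updated plan below indeed shows the constant as plain string. The existing CHAR(2)/VARCHAR(2) test also changes, from varchar(3) to varchar(4), matching the 2 + 2 input lengths. A tiny sketch of the cap, using the HiveChar constant already referenced in the patch (the class name is illustrative):

    import org.apache.hadoop.hive.common.type.HiveChar;

    public class CharCapSketch {
      public static void main(String[] args) {
        // CHAR precision is capped in Hive; results that would exceed it
        // fall back to STRING, which is what vector_const.q.out below shows.
        System.out.println(HiveChar.MAX_CHAR_LENGTH); // 255
      }
    }
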
diff --git a/ql/src/test/results/clientpositive/llap/subquery_notexists.q.out b/ql/src/test/results/clientpositive/llap/subquery_notexists.q.out
index c620c47..d36243a 100644
--- a/ql/src/test/results/clientpositive/llap/subquery_notexists.q.out
+++ b/ql/src/test/results/clientpositive/llap/subquery_notexists.q.out
@@ -1621,7 +1621,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tschema
 #### A masked pattern was here ####
 CBO PLAN:
-HiveProject(eid=[_UTF-16LE'empno'], id=[$0])
+HiveProject(eid=[_UTF-16LE'empno':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[$0])
   HiveAggregate(group=[{0}])
     HiveFilter(condition=[IS NULL($1)])
       HiveJoin(condition=[=($0, $2)], joinType=[left], algorithm=[none], cost=[not available])
diff --git a/ql/src/test/results/clientpositive/llap/union_assertion_type.q.out b/ql/src/test/results/clientpositive/llap/union_assertion_type.q.out
index b1b5ddc..4c9e94e 100644
--- a/ql/src/test/results/clientpositive/llap/union_assertion_type.q.out
+++ b/ql/src/test/results/clientpositive/llap/union_assertion_type.q.out
@@ -142,10 +142,10 @@ STAGE PLANS:
                       Select Operator
                         expressions: _col0 (type: string), _col1 (type: string), '5' (type: string)
                         outputColumnNames: _col0, _col1, _col2
-                        Statistics: Num rows: 2 Data size: 510 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 1 Data size: 255 Basic stats: COMPLETE Column stats: COMPLETE
                         File Output Operator
                           compressed: false
-                          Statistics: Num rows: 2 Data size: 510 Basic stats: COMPLETE Column stats: COMPLETE
+                          Statistics: Num rows: 1 Data size: 255 Basic stats: COMPLETE Column stats: COMPLETE
                           table:
                               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -167,10 +167,10 @@ STAGE PLANS:
                       Select Operator
                         expressions: _col0 (type: string), _col1 (type: string), '5' (type: string)
                         outputColumnNames: _col0, _col1, _col2
-                        Statistics: Num rows: 2 Data size: 510 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 1 Data size: 255 Basic stats: COMPLETE Column stats: COMPLETE
                         File Output Operator
                           compressed: false
-                          Statistics: Num rows: 2 Data size: 510 Basic stats: COMPLETE Column stats: COMPLETE
+                          Statistics: Num rows: 1 Data size: 255 Basic stats: COMPLETE Column stats: COMPLETE
                           table:
                               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/llap/vector_const.q.out b/ql/src/test/results/clientpositive/llap/vector_const.q.out
index fc914a0..cbeead1 100644
--- a/ql/src/test/results/clientpositive/llap/vector_const.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_const.q.out
@@ -40,7 +40,7 @@ STAGE PLANS:
                   alias: varchar_const_1
                   Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
-                    expressions: 'FF' (type: varchar(3))
+                    expressions: 'FF' (type: varchar(4))
                     outputColumnNames: _col0
                     Statistics: Num rows: 1 Data size: 86 Basic stats: COMPLETE Column stats: COMPLETE
                     File Output Operator
@@ -68,3 +68,56 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@varchar_const_1
 #### A masked pattern was here ####
 FF
+PREHOOK: query: EXPLAIN
+SELECT CONCAT(CAST('F' AS CHAR(200)), CAST('F' AS CHAR(200))) FROM VARCHAR_CONST_1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_const_1
+#### A masked pattern was here ####
+POSTHOOK: query: EXPLAIN
+SELECT CONCAT(CAST('F' AS CHAR(200)), CAST('F' AS CHAR(200))) FROM VARCHAR_CONST_1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_const_1
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: varchar_const_1
+                  Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: 'FF' (type: string)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 1 Data size: 86 Basic stats: COMPLETE Column stats: COMPLETE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 1 Data size: 86 Basic stats: COMPLETE Column stats: COMPLETE
+                      table:
+                          input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT CONCAT(CAST('F' AS CHAR(200)), CAST('F' AS CHAR(200))) FROM VARCHAR_CONST_1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_const_1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CONCAT(CAST('F' AS CHAR(200)), CAST('F' AS CHAR(200))) FROM VARCHAR_CONST_1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_const_1
+#### A masked pattern was here ####
+FF
diff --git a/ql/src/test/results/clientpositive/perf/tez/cbo_query5.q.out b/ql/src/test/results/clientpositive/perf/tez/cbo_query5.q.out
index 9a09d89..578070e 100644
--- a/ql/src/test/results/clientpositive/perf/tez/cbo_query5.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/cbo_query5.q.out
@@ -280,7 +280,7 @@ HiveSortLimit(sort0=[$0], sort1=[$1], dir0=[ASC], dir1=[ASC], fetch=[100])
     HiveAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], agg#0=[sum($2)], agg#1=[sum($3)], agg#2=[sum($4)])
       HiveProject(channel=[$0], id=[$1], sales=[$2], returns=[$3], profit=[$4])
         HiveUnion(all=[true])
-          HiveProject(channel=[_UTF-16LE'store channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'store', $0)], sales=[$1], returns=[$3], profit=[-($2, $4)])
+          HiveProject(channel=[_UTF-16LE'store channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'store':VARCHAR(2147483647) CHARACTER SET "UTF-16LE", $0)], sales=[$1], returns=[$3], profit=[-($2, $4)])
             HiveAggregate(group=[{8}], agg#0=[sum($2)], agg#1=[sum($3)], agg#2=[sum($4)], agg#3=[sum($5)])
               HiveJoin(condition=[=($0, $7)], joinType=[inner], algorithm=[none], cost=[not available])
                 HiveJoin(condition=[=($1, $6)], joinType=[inner], algorithm=[none], cost=[not available])
@@ -298,7 +298,7 @@ HiveSortLimit(sort0=[$0], sort1=[$1], dir0=[ASC], dir1=[ASC], fetch=[100])
                 HiveProject(s_store_sk=[$0], s_store_id=[$1])
                   HiveFilter(condition=[IS NOT NULL($0)])
                     HiveTableScan(table=[[default, store]], table:alias=[store])
-          HiveProject(channel=[_UTF-16LE'catalog channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'catalog_page', $0)], sales=[$1], returns=[$3], profit=[-($2, $4)])
+          HiveProject(channel=[_UTF-16LE'catalog channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'catalog_page':VARCHAR(2147483647) CHARACTER SET "UTF-16LE", $0)], sales=[$1], returns=[$3], profit=[-($2, $4)])
             HiveAggregate(group=[{1}], agg#0=[sum($4)], agg#1=[sum($5)], agg#2=[sum($6)], agg#3=[sum($7)])
               HiveJoin(condition=[=($2, $0)], joinType=[inner], algorithm=[none], cost=[not available])
                 HiveProject(cp_catalog_page_sk=[$0], cp_catalog_page_id=[$1])
@@ -316,7 +316,7 @@ HiveSortLimit(sort0=[$0], sort1=[$1], dir0=[ASC], dir1=[ASC], fetch=[100])
                   HiveProject(d_date_sk=[$0])
                     HiveFilter(condition=[AND(BETWEEN(false, CAST($2):TIMESTAMP(9), 1998-08-04 00:00:00:TIMESTAMP(9), 1998-08-18 00:00:00:TIMESTAMP(9)), IS NOT NULL($0))])
                       HiveTableScan(table=[[default, date_dim]], table:alias=[date_dim])
-          HiveProject(channel=[_UTF-16LE'web channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'web_site', $0)], sales=[$1], returns=[$3], profit=[-($2, $4)])
+          HiveProject(channel=[_UTF-16LE'web channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'web_site':VARCHAR(2147483647) CHARACTER SET "UTF-16LE", $0)], sales=[$1], returns=[$3], profit=[-($2, $4)])
             HiveAggregate(group=[{8}], agg#0=[sum($2)], agg#1=[sum($3)], agg#2=[sum($4)], agg#3=[sum($5)])
               HiveJoin(condition=[=($0, $7)], joinType=[inner], algorithm=[none], cost=[not available])
                 HiveJoin(condition=[=($1, $6)], joinType=[inner], algorithm=[none], cost=[not available])
diff --git a/ql/src/test/results/clientpositive/perf/tez/cbo_query80.q.out b/ql/src/test/results/clientpositive/perf/tez/cbo_query80.q.out
index 5d2ab05..85fdf0c 100644
--- a/ql/src/test/results/clientpositive/perf/tez/cbo_query80.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/cbo_query80.q.out
@@ -220,7 +220,7 @@ HiveSortLimit(sort0=[$0], sort1=[$1], dir0=[ASC], dir1=[ASC], fetch=[100])
     HiveAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], agg#0=[sum($2)], agg#1=[sum($3)], agg#2=[sum($4)])
       HiveProject(channel=[$0], id=[$1], sales=[$2], returns=[$3], profit=[$4])
         HiveUnion(all=[true])
-          HiveProject(channel=[_UTF-16LE'store channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'store', $0)], sales=[$1], returns=[$2], profit=[$3])
+          HiveProject(channel=[_UTF-16LE'store channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'store':VARCHAR(2147483647) CHARACTER SET "UTF-16LE", $0)], sales=[$1], returns=[$2], profit=[$3])
             HiveAggregate(group=[{0}], agg#0=[sum($1)], agg#1=[sum($2)], agg#2=[sum($3)])
               HiveProject($f0=[$1], $f1=[$8], $f2=[CASE(IS NOT NULL($12), $12, 0:DECIMAL(12, 2))], $f3=[-($9, CASE(IS NOT NULL($13), $13, 0:DECIMAL(12, 2)))])
                 HiveJoin(condition=[=($5, $0)], joinType=[inner], algorithm=[none], cost=[not available])
@@ -246,7 +246,7 @@ HiveSortLimit(sort0=[$0], sort1=[$1], dir0=[ASC], dir1=[ASC], fetch=[100])
                     HiveProject(p_promo_sk=[$0])
                       HiveFilter(condition=[AND(=($11, _UTF-16LE'N'), IS NOT NULL($0))])
                         HiveTableScan(table=[[default, promotion]], table:alias=[promotion])
-          HiveProject(channel=[_UTF-16LE'catalog channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'catalog_page', $0)], sales=[$1], returns=[$2], profit=[$3])
+          HiveProject(channel=[_UTF-16LE'catalog channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'catalog_page':VARCHAR(2147483647) CHARACTER SET "UTF-16LE", $0)], sales=[$1], returns=[$2], profit=[$3])
             HiveAggregate(group=[{0}], agg#0=[sum($1)], agg#1=[sum($2)], agg#2=[sum($3)])
               HiveProject($f0=[$1], $f1=[$8], $f2=[CASE(IS NOT NULL($12), $12, 0:DECIMAL(12, 2))], $f3=[-($9, CASE(IS NOT NULL($13), $13, 0:DECIMAL(12, 2)))])
                 HiveJoin(condition=[=($4, $0)], joinType=[inner], algorithm=[none], cost=[not available])
@@ -272,7 +272,7 @@ HiveSortLimit(sort0=[$0], sort1=[$1], dir0=[ASC], dir1=[ASC], fetch=[100])
                     HiveProject(p_promo_sk=[$0])
                       HiveFilter(condition=[AND(=($11, _UTF-16LE'N'), IS NOT NULL($0))])
                         HiveTableScan(table=[[default, promotion]], table:alias=[promotion])
-          HiveProject(channel=[_UTF-16LE'web channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'web_site', $0)], sales=[$1], returns=[$2], profit=[$3])
+          HiveProject(channel=[_UTF-16LE'web channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'web_site':VARCHAR(2147483647) CHARACTER SET "UTF-16LE", $0)], sales=[$1], returns=[$2], profit=[$3])
             HiveAggregate(group=[{0}], agg#0=[sum($1)], agg#1=[sum($2)], agg#2=[sum($3)])
               HiveProject($f0=[$15], $f1=[$7], $f2=[CASE(IS NOT NULL($11), $11, 0:DECIMAL(12, 2))], $f3=[-($8, CASE(IS NOT NULL($12), $12, 0:DECIMAL(12, 2)))])
                 HiveJoin(condition=[=($4, $14)], joinType=[inner], algorithm=[none], cost=[not available])
diff --git a/ql/src/test/results/clientpositive/perf/tez/cbo_query84.q.out b/ql/src/test/results/clientpositive/perf/tez/cbo_query84.q.out
index bac98be..d06a2a4b 100644
--- a/ql/src/test/results/clientpositive/perf/tez/cbo_query84.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/cbo_query84.q.out
@@ -67,7 +67,7 @@ HiveProject(customer_id=[$0], customername=[$1])
         HiveProject(c_customer_id=[$0], c_current_cdemo_sk=[$1], c_current_hdemo_sk=[$2], c_current_addr_sk=[$3], ||=[$4], ca_address_sk=[$5], hd_demo_sk=[$6], hd_income_band_sk=[$7], ib_income_band_sk=[$8])
           HiveJoin(condition=[=($6, $2)], joinType=[inner], algorithm=[none], cost=[not available])
             HiveJoin(condition=[=($3, $5)], joinType=[inner], algorithm=[none], cost=[not available])
-              HiveProject(c_customer_id=[$1], c_current_cdemo_sk=[$2], c_current_hdemo_sk=[$3], c_current_addr_sk=[$4], ||=[||(||($9, _UTF-16LE', '), $8)])
+              HiveProject(c_customer_id=[$1], c_current_cdemo_sk=[$2], c_current_hdemo_sk=[$3], c_current_addr_sk=[$4], ||=[||(||($9, _UTF-16LE', ':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"), $8)])
                 HiveFilter(condition=[AND(IS NOT NULL($3), IS NOT NULL($2), IS NOT NULL($4))])
                   HiveTableScan(table=[[default, customer]], table:alias=[customer])
               HiveProject(ca_address_sk=[$0])
diff --git a/ql/src/test/results/clientpositive/perf/tez/cbo_query91.q.out b/ql/src/test/results/clientpositive/perf/tez/cbo_query91.q.out
index 50fba2f..43f2293 100644
--- a/ql/src/test/results/clientpositive/perf/tez/cbo_query91.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/cbo_query91.q.out
@@ -105,6 +105,6 @@ HiveProject(call_center=[$0], call_center_name=[$1], manager=[$2], returns_loss=
                   HiveFilter(condition=[IS NOT NULL($0)])
                     HiveTableScan(table=[[default, call_center]], table:alias=[call_center])
           HiveProject(hd_demo_sk=[$0])
-            HiveFilter(condition=[AND(LIKE($2, _UTF-16LE'0-500%'), IS NOT NULL($0))])
+            HiveFilter(condition=[AND(LIKE($2, _UTF-16LE'0-500%':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"), IS NOT NULL($0))])
               HiveTableScan(table=[[default, household_demographics]], table:alias=[household_demographics])
 
diff --git a/ql/src/test/results/clientpositive/perf/tez/constraints/cbo_query5.q.out b/ql/src/test/results/clientpositive/perf/tez/constraints/cbo_query5.q.out
index 298f8e7..b88c3b8 100644
--- a/ql/src/test/results/clientpositive/perf/tez/constraints/cbo_query5.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/constraints/cbo_query5.q.out
@@ -280,7 +280,7 @@ HiveSortLimit(sort0=[$0], sort1=[$1], dir0=[ASC], dir1=[ASC], fetch=[100])
     HiveAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], agg#0=[sum($2)], agg#1=[sum($3)], agg#2=[sum($4)])
       HiveProject(channel=[$0], id=[$1], sales=[$2], returns=[$3], profit=[$4])
         HiveUnion(all=[true])
-          HiveProject(channel=[_UTF-16LE'store channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'store', $0)], sales=[$1], returns=[$3], profit=[-($2, $4)])
+          HiveProject(channel=[_UTF-16LE'store channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'store':VARCHAR(2147483647) CHARACTER SET "UTF-16LE", $0)], sales=[$1], returns=[$3], profit=[-($2, $4)])
             HiveAggregate(group=[{8}], agg#0=[sum($2)], agg#1=[sum($3)], agg#2=[sum($4)], agg#3=[sum($5)])
               HiveJoin(condition=[=($0, $7)], joinType=[inner], algorithm=[none], cost=[not available])
                 HiveJoin(condition=[=($1, $6)], joinType=[inner], algorithm=[none], cost=[not available])
@@ -297,7 +297,7 @@ HiveSortLimit(sort0=[$0], sort1=[$1], dir0=[ASC], dir1=[ASC], fetch=[100])
                       HiveTableScan(table=[[default, date_dim]], table:alias=[date_dim])
                 HiveProject(s_store_sk=[$0], s_store_id=[$1])
                   HiveTableScan(table=[[default, store]], table:alias=[store])
-          HiveProject(channel=[_UTF-16LE'catalog channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'catalog_page', $0)], sales=[$1], returns=[$3], profit=[-($2, $4)])
+          HiveProject(channel=[_UTF-16LE'catalog channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'catalog_page':VARCHAR(2147483647) CHARACTER SET "UTF-16LE", $0)], sales=[$1], returns=[$3], profit=[-($2, $4)])
             HiveAggregate(group=[{1}], agg#0=[sum($4)], agg#1=[sum($5)], agg#2=[sum($6)], agg#3=[sum($7)])
               HiveJoin(condition=[=($2, $0)], joinType=[inner], algorithm=[none], cost=[not available])
                 HiveProject(cp_catalog_page_sk=[$0], cp_catalog_page_id=[$1])
@@ -314,7 +314,7 @@ HiveSortLimit(sort0=[$0], sort1=[$1], dir0=[ASC], dir1=[ASC], fetch=[100])
                   HiveProject(d_date_sk=[$0])
                     HiveFilter(condition=[BETWEEN(false, CAST($2):TIMESTAMP(9), 1998-08-04 00:00:00:TIMESTAMP(9), 1998-08-18 00:00:00:TIMESTAMP(9))])
                       HiveTableScan(table=[[default, date_dim]], table:alias=[date_dim])
-          HiveProject(channel=[_UTF-16LE'web channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'web_site', $0)], sales=[$1], returns=[$3], profit=[-($2, $4)])
+          HiveProject(channel=[_UTF-16LE'web channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'web_site':VARCHAR(2147483647) CHARACTER SET "UTF-16LE", $0)], sales=[$1], returns=[$3], profit=[-($2, $4)])
             HiveAggregate(group=[{8}], agg#0=[sum($2)], agg#1=[sum($3)], agg#2=[sum($4)], agg#3=[sum($5)])
               HiveJoin(condition=[=($0, $7)], joinType=[inner], algorithm=[none], cost=[not available])
                 HiveJoin(condition=[=($1, $6)], joinType=[inner], algorithm=[none], cost=[not available])
diff --git a/ql/src/test/results/clientpositive/perf/tez/constraints/cbo_query80.q.out b/ql/src/test/results/clientpositive/perf/tez/constraints/cbo_query80.q.out
index 581afc4..53e9fbd 100644
--- a/ql/src/test/results/clientpositive/perf/tez/constraints/cbo_query80.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/constraints/cbo_query80.q.out
@@ -220,7 +220,7 @@ HiveSortLimit(sort0=[$0], sort1=[$1], dir0=[ASC], dir1=[ASC], fetch=[100])
     HiveAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], agg#0=[sum($2)], agg#1=[sum($3)], agg#2=[sum($4)])
       HiveProject(channel=[$0], id=[$1], sales=[$2], returns=[$3], profit=[$4])
         HiveUnion(all=[true])
-          HiveProject(channel=[_UTF-16LE'store channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'store', $0)], sales=[$1], returns=[$2], profit=[$3])
+          HiveProject(channel=[_UTF-16LE'store channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'store':VARCHAR(2147483647) CHARACTER SET "UTF-16LE", $0)], sales=[$1], returns=[$2], profit=[$3])
             HiveAggregate(group=[{0}], agg#0=[sum($1)], agg#1=[sum($2)], agg#2=[sum($3)])
               HiveProject($f0=[$1], $f1=[$8], $f2=[CASE(IS NOT NULL($12), $12, 0:DECIMAL(12, 2))], $f3=[-($9, CASE(IS NOT NULL($13), $13, 0:DECIMAL(12, 2)))])
                 HiveJoin(condition=[=($5, $0)], joinType=[inner], algorithm=[none], cost=[not available])
@@ -244,7 +244,7 @@ HiveSortLimit(sort0=[$0], sort1=[$1], dir0=[ASC], dir1=[ASC], fetch=[100])
                     HiveProject(p_promo_sk=[$0])
                       HiveFilter(condition=[=($11, _UTF-16LE'N')])
                         HiveTableScan(table=[[default, promotion]], table:alias=[promotion])
-          HiveProject(channel=[_UTF-16LE'catalog channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'catalog_page', $0)], sales=[$1], returns=[$2], profit=[$3])
+          HiveProject(channel=[_UTF-16LE'catalog channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'catalog_page':VARCHAR(2147483647) CHARACTER SET "UTF-16LE", $0)], sales=[$1], returns=[$2], profit=[$3])
             HiveAggregate(group=[{0}], agg#0=[sum($1)], agg#1=[sum($2)], agg#2=[sum($3)])
               HiveProject($f0=[$1], $f1=[$8], $f2=[CASE(IS NOT NULL($12), $12, 0:DECIMAL(12, 2))], $f3=[-($9, CASE(IS NOT NULL($13), $13, 0:DECIMAL(12, 2)))])
                 HiveJoin(condition=[=($4, $0)], joinType=[inner], algorithm=[none], cost=[not available])
@@ -268,7 +268,7 @@ HiveSortLimit(sort0=[$0], sort1=[$1], dir0=[ASC], dir1=[ASC], fetch=[100])
                     HiveProject(p_promo_sk=[$0])
                       HiveFilter(condition=[=($11, _UTF-16LE'N')])
                         HiveTableScan(table=[[default, promotion]], table:alias=[promotion])
-          HiveProject(channel=[_UTF-16LE'web channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'web_site', $0)], sales=[$1], returns=[$2], profit=[$3])
+          HiveProject(channel=[_UTF-16LE'web channel':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"], id=[||(_UTF-16LE'web_site':VARCHAR(2147483647) CHARACTER SET "UTF-16LE", $0)], sales=[$1], returns=[$2], profit=[$3])
             HiveAggregate(group=[{0}], agg#0=[sum($1)], agg#1=[sum($2)], agg#2=[sum($3)])
               HiveProject($f0=[$15], $f1=[$7], $f2=[CASE(IS NOT NULL($11), $11, 0:DECIMAL(12, 2))], $f3=[-($8, CASE(IS NOT NULL($12), $12, 0:DECIMAL(12, 2)))])
                 HiveJoin(condition=[=($4, $14)], joinType=[inner], algorithm=[none], cost=[not available])
diff --git a/ql/src/test/results/clientpositive/perf/tez/constraints/cbo_query84.q.out b/ql/src/test/results/clientpositive/perf/tez/constraints/cbo_query84.q.out
index 4a27a09..5c2d837 100644
--- a/ql/src/test/results/clientpositive/perf/tez/constraints/cbo_query84.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/constraints/cbo_query84.q.out
@@ -66,7 +66,7 @@ HiveProject(customer_id=[$0], customername=[$1])
               HiveTableScan(table=[[default, customer_demographics]], table:alias=[customer_demographics])
           HiveProject(c_customer_id=[$0], c_current_cdemo_sk=[$1], c_current_hdemo_sk=[$2], c_current_addr_sk=[$3], ||=[$4], ca_address_sk=[$5])
             HiveJoin(condition=[=($3, $5)], joinType=[inner], algorithm=[none], cost=[not available])
-              HiveProject(c_customer_id=[$1], c_current_cdemo_sk=[$2], c_current_hdemo_sk=[$3], c_current_addr_sk=[$4], ||=[||(||($9, _UTF-16LE', '), $8)])
+              HiveProject(c_customer_id=[$1], c_current_cdemo_sk=[$2], c_current_hdemo_sk=[$3], c_current_addr_sk=[$4], ||=[||(||($9, _UTF-16LE', ':VARCHAR(2147483647) CHARACTER SET "UTF-16LE"), $8)])
                 HiveFilter(condition=[AND(IS NOT NULL($3), IS NOT NULL($2), IS NOT NULL($4))])
                   HiveTableScan(table=[[default, customer]], table:alias=[customer])
               HiveProject(ca_address_sk=[$0])
diff --git a/ql/src/test/results/clientpositive/perf/tez/constraints/cbo_query91.q.out b/ql/src/test/results/clientpositive/perf/tez/constraints/cbo_query91.q.out
index 8c554bd..015ab74 100644
--- a/ql/src/test/results/clientpositive/perf/tez/constraints/cbo_query91.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/constraints/cbo_query91.q.out
@@ -102,7 +102,7 @@ HiveProject(call_center=[$0], call_center_name=[$1], manager=[$2], returns_loss=
                     HiveFilter(condition=[AND(=($6, 1999), =($8, 11))])
                       HiveTableScan(table=[[default, date_dim]], table:alias=[date_dim])
             HiveProject(hd_demo_sk=[$0])
-              HiveFilter(condition=[LIKE($2, _UTF-16LE'0-500%')])
+              HiveFilter(condition=[LIKE($2, _UTF-16LE'0-500%':VARCHAR(2147483647) CHARACTER SET "UTF-16LE")])
                 HiveTableScan(table=[[default, household_demographics]], table:alias=[household_demographics])
           HiveProject(cc_call_center_sk=[$0], cc_call_center_id=[$1], cc_name=[$6], cc_manager=[$11])
             HiveTableScan(table=[[default, call_center]], table:alias=[call_center])