Posted to commits@hive.apache.org by se...@apache.org on 2018/07/25 18:19:18 UTC

[06/11] hive git commit: HIVE-20207: Vectorization: Fix NULL / Wrong Results issues in Filter / Compare (Matt McCline, reviewed by Teddy Choi)
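
Background for the new tests in this commit: SQL comparisons follow three-valued
logic, so a compare expression evaluated as a projection must yield NULL when an
operand is NULL, while the same expression evaluated as a filter can only keep or
drop a row, collapsing NULL onto "dropped". A minimal standalone sketch of that
rule (illustrative only, not code from this patch; the class and method names are
invented):

    // Three-valued comparison: the contract that the FILTER_VECTOR_EXPRESSION
    // and COMPARE_VECTOR_EXPRESSION test modes below both have to honor.
    public class ThreeValuedCompareSketch {

      // Projection-style compare: NULL in, NULL (UNKNOWN) out.
      static Boolean compareProjection(Long a, Long b) {
        if (a == null || b == null) {
          return null;
        }
        return a > b;
      }

      // Filter-style compare: a row survives only when the predicate is TRUE,
      // so UNKNOWN is indistinguishable from FALSE in the output.
      static boolean compareFilter(Long a, Long b) {
        Boolean result = compareProjection(a, b);
        return result != null && result;
      }

      public static void main(String[] args) {
        System.out.println(compareProjection(null, 5L));  // null
        System.out.println(compareFilter(null, 5L));      // false (row dropped)
      }
    }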

http://git-wip-us.apache.org/repos/asf/hive/blob/71c49878/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterCompare.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterCompare.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterCompare.java
new file mode 100644
index 0000000..1ff11ec
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterCompare.java
@@ -0,0 +1,795 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.lang.reflect.Constructor;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Random;
+
+import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
+import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
+import org.apache.hadoop.hive.ql.exec.FunctionInfo;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow;
+import org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource;
+import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
+import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateDiff;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateSub;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotEqual;
+import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.BooleanWritable;
+
+import junit.framework.Assert;
+
+import org.junit.Ignore;
+import org.junit.Test;
+
+public class TestVectorFilterCompare {
+
+  public TestVectorFilterCompare() {
+    // Comparison evaluation relies on getting the conf from SessionState, so initialize it here.
+    SessionState ss = new SessionState(new HiveConf());
+    ss.getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "latest");
+    SessionState.setCurrentSessionState(ss);
+  }
+
+  @Test
+  public void testIntegers() throws Exception {
+    Random random = new Random(7743);
+
+    doIntegerTests(random);
+  }
+
+  @Test
+  public void testIntegerFloating() throws Exception {
+    Random random = new Random(7743);
+
+    doIntegerFloatingTests(random);
+  }
+
+  @Test
+  public void testFloating() throws Exception {
+    Random random = new Random(7743);
+
+    doFloatingTests(random);
+  }
+
+  @Test
+  public void testDecimal() throws Exception {
+    Random random = new Random(7743);
+
+    doDecimalTests(random, /* tryDecimal64 */ false);
+  }
+
+  @Test
+  public void testDecimal64() throws Exception {
+    Random random = new Random(7743);
+
+    doDecimalTests(random, /* tryDecimal64 */ true);
+  }
+
+  @Test
+  public void testTimestamp() throws Exception {
+    Random random = new Random(7743);
+
+    doTests(random, TypeInfoFactory.timestampTypeInfo, TypeInfoFactory.timestampTypeInfo);
+
+    doTests(random, TypeInfoFactory.timestampTypeInfo, TypeInfoFactory.longTypeInfo);
+    doTests(random, TypeInfoFactory.timestampTypeInfo, TypeInfoFactory.doubleTypeInfo);
+
+    doTests(random, TypeInfoFactory.longTypeInfo, TypeInfoFactory.timestampTypeInfo);
+    doTests(random, TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.timestampTypeInfo);
+  }
+
+  @Test
+  public void testDate() throws Exception {
+    Random random = new Random(7743);
+
+    doTests(random, TypeInfoFactory.dateTypeInfo, TypeInfoFactory.dateTypeInfo);
+  }
+
+  @Test
+  public void testInterval() throws Exception {
+    Random random = new Random(7743);
+
+    doTests(random, TypeInfoFactory.intervalYearMonthTypeInfo, TypeInfoFactory.intervalYearMonthTypeInfo);
+    doTests(random, TypeInfoFactory.intervalDayTimeTypeInfo, TypeInfoFactory.intervalDayTimeTypeInfo);
+  }
+
+  @Test
+  public void testStringFamily() throws Exception {
+    Random random = new Random(7743);
+
+    doTests(random, TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo);
+
+    doTests(random, new CharTypeInfo(10), new CharTypeInfo(10));
+    doTests(random, new VarcharTypeInfo(10), new VarcharTypeInfo(10));
+  }
+
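+  // The same random rows are evaluated once per mode below. ROW_MODE uses the
+  // non-vectorized ExprNodeEvaluator and supplies the expected results; the
+  // adaptor, filter, and projection ("compare") vector expressions must agree.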
+  public enum FilterCompareTestMode {
+    ROW_MODE,
+    ADAPTOR,
+    FILTER_VECTOR_EXPRESSION,
+    COMPARE_VECTOR_EXPRESSION;
+
+    static final int count = values().length;
+  }
+
+  public enum ColumnScalarMode {
+    COLUMN_COLUMN,
+    COLUMN_SCALAR,
+    SCALAR_COLUMN;
+
+    static final int count = values().length;
+  }
+
+  private static TypeInfo[] integerTypeInfos = new TypeInfo[] {
+    TypeInfoFactory.byteTypeInfo,
+    TypeInfoFactory.shortTypeInfo,
+    TypeInfoFactory.intTypeInfo,
+    TypeInfoFactory.longTypeInfo
+  };
+
+  // We have test failures with FLOAT.  Ignoring this issue for now.
+  private static TypeInfo[] floatingTypeInfos = new TypeInfo[] {
+    // TypeInfoFactory.floatTypeInfo,
+    TypeInfoFactory.doubleTypeInfo
+  };
+
+  private void doIntegerTests(Random random)
+          throws Exception {
+    for (TypeInfo typeInfo : integerTypeInfos) {
+      doTests(random, typeInfo, typeInfo);
+    }
+  }
+
+  private void doIntegerFloatingTests(Random random)
+      throws Exception {
+    for (TypeInfo typeInfo1 : integerTypeInfos) {
+      for (TypeInfo typeInfo2 : floatingTypeInfos) {
+        doTests(random, typeInfo1, typeInfo2);
+      }
+    }
+    for (TypeInfo typeInfo1 : floatingTypeInfos) {
+      for (TypeInfo typeInfo2 : integerTypeInfos) {
+        doTests(random, typeInfo1, typeInfo2);
+      }
+    }
+  }
+
+  private void doFloatingTests(Random random)
+      throws Exception {
+    for (TypeInfo typeInfo1 : floatingTypeInfos) {
+      for (TypeInfo typeInfo2 : floatingTypeInfos) {
+        doTests(random, typeInfo1, typeInfo2);
+      }
+    }
+  }
+
+  private static TypeInfo[] decimalTypeInfos = new TypeInfo[] {
+    new DecimalTypeInfo(38, 18),
+    new DecimalTypeInfo(25, 2),
+    new DecimalTypeInfo(19, 4),
+    new DecimalTypeInfo(18, 10),
+    new DecimalTypeInfo(17, 3),
+    new DecimalTypeInfo(12, 2),
+    new DecimalTypeInfo(7, 1)
+  };
+
+  private void doDecimalTests(Random random, boolean tryDecimal64)
+      throws Exception {
+    for (TypeInfo typeInfo : decimalTypeInfos) {
+      doTests(random, typeInfo, typeInfo, tryDecimal64);
+    }
+  }
+
+  private TypeInfo getOutputTypeInfo(GenericUDF genericUdfClone,
+      List<ObjectInspector> objectInspectorList)
+    throws HiveException {
+
+    ObjectInspector[] array =
+        objectInspectorList.toArray(new ObjectInspector[objectInspectorList.size()]);
+    ObjectInspector outputObjectInspector = genericUdfClone.initialize(array);
+    return TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
+  }
+
+  public enum Comparison {
+    EQUALS,
+    LESS_THAN,
+    LESS_THAN_EQUAL,
+    GREATER_THAN,
+    GREATER_THAN_EQUAL,
+    NOT_EQUALS;
+  }
+
+  private TypeInfo getDecimalScalarTypeInfo(Object scalarObject) {
+    HiveDecimal dec = (HiveDecimal) scalarObject;
+    int precision = dec.precision();
+    int scale = dec.scale();
+    return new DecimalTypeInfo(precision, scale);
+  }
+
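+  // DECIMAL_64 is the variation that stores a decimal whose precision fits in
+  // 18 digits as a scaled long; only such columns can use the Decimal64
+  // kernels, so the check below is driven by the declared precision.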
+  private boolean checkDecimal64(boolean tryDecimal64, TypeInfo typeInfo) {
+    if (!tryDecimal64 || !(typeInfo instanceof DecimalTypeInfo)) {
+      return false;
+    }
+    DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
+    boolean result = HiveDecimalWritable.isPrecisionDecimal64(decimalTypeInfo.getPrecision());
+    return result;
+  }
+
+  private void doTests(Random random, TypeInfo typeInfo1, TypeInfo typeInfo2, boolean tryDecimal64)
+      throws Exception {
+    for (ColumnScalarMode columnScalarMode : ColumnScalarMode.values()) {
+      doTestsWithDiffColumnScalar(
+          random, typeInfo1, typeInfo2, columnScalarMode, tryDecimal64);
+    }
+  }
+
+  private void doTests(Random random, TypeInfo typeInfo1, TypeInfo typeInfo2)
+      throws Exception {
+    for (ColumnScalarMode columnScalarMode : ColumnScalarMode.values()) {
+      doTestsWithDiffColumnScalar(
+          random, typeInfo1, typeInfo2, columnScalarMode);
+    }
+  }
+
+  private void doTestsWithDiffColumnScalar(Random random, TypeInfo typeInfo1, TypeInfo typeInfo2,
+      ColumnScalarMode columnScalarMode)
+          throws Exception {
+    doTestsWithDiffColumnScalar(random, typeInfo1, typeInfo2, columnScalarMode, false);
+  }
+
+  private void doTestsWithDiffColumnScalar(Random random, TypeInfo typeInfo1, TypeInfo typeInfo2,
+      ColumnScalarMode columnScalarMode, boolean tryDecimal64)
+          throws Exception {
+    for (Comparison comparison : Comparison.values()) {
+      doTestsWithDiffColumnScalar(
+          random, typeInfo1, typeInfo2, columnScalarMode, comparison, tryDecimal64);
+    }
+  }
+
+  private void doTestsWithDiffColumnScalar(Random random, TypeInfo typeInfo1, TypeInfo typeInfo2,
+      ColumnScalarMode columnScalarMode, Comparison comparison, boolean tryDecimal64)
+          throws Exception {
+
+    String typeName1 = typeInfo1.getTypeName();
+    PrimitiveCategory primitiveCategory1 =
+        ((PrimitiveTypeInfo) typeInfo1).getPrimitiveCategory();
+
+    String typeName2 = typeInfo2.getTypeName();
+    PrimitiveCategory primitiveCategory2 =
+        ((PrimitiveTypeInfo) typeInfo2).getPrimitiveCategory();
+
+    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
+    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList =
+        new ArrayList<DataTypePhysicalVariation>();
+
+    List<String> columns = new ArrayList<String>();
+    int columnNum = 0;
+
+    ExprNodeDesc col1Expr;
+    Object scalar1Object = null;
+    final boolean decimal64Enable1 = checkDecimal64(tryDecimal64, typeInfo1);
+    if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN ||
+        columnScalarMode == ColumnScalarMode.COLUMN_SCALAR) {
+      generationSpecList.add(
+          GenerationSpec.createSameType(typeInfo1));
+      explicitDataTypePhysicalVariationList.add(
+          decimal64Enable1 ?
+              DataTypePhysicalVariation.DECIMAL_64 :
+              DataTypePhysicalVariation.NONE);
+
+      String columnName = "col" + (columnNum++);
+      col1Expr = new ExprNodeColumnDesc(typeInfo1, columnName, "table", false);
+      columns.add(columnName);
+    } else {
+      scalar1Object =
+          VectorRandomRowSource.randomPrimitiveObject(
+              random, (PrimitiveTypeInfo) typeInfo1);
+
+      // Adjust the decimal type to the scalar's type...
+      if (typeInfo1 instanceof DecimalTypeInfo) {
+        typeInfo1 = getDecimalScalarTypeInfo(scalar1Object);
+      }
+
+      col1Expr = new ExprNodeConstantDesc(typeInfo1, scalar1Object);
+    }
+    ExprNodeDesc col2Expr;
+    Object scalar2Object = null;
+    final boolean decimal64Enable2 = checkDecimal64(tryDecimal64, typeInfo2);
+    if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN ||
+        columnScalarMode == ColumnScalarMode.SCALAR_COLUMN) {
+      generationSpecList.add(
+          GenerationSpec.createSameType(typeInfo2));
+
+      explicitDataTypePhysicalVariationList.add(
+          decimal64Enable2 ?
+              DataTypePhysicalVariation.DECIMAL_64 :
+              DataTypePhysicalVariation.NONE);
+
+      String columnName = "col" + (columnNum++);
+      col2Expr = new ExprNodeColumnDesc(typeInfo2, columnName, "table", false);
+      columns.add(columnName);
+    } else {
+      scalar2Object =
+          VectorRandomRowSource.randomPrimitiveObject(
+              random, (PrimitiveTypeInfo) typeInfo2);
+
+      // Adjust the decimal type to the scalar's type...
+      if (typeInfo2 instanceof DecimalTypeInfo) {
+        typeInfo2 = getDecimalScalarTypeInfo(scalar2Object);
+      }
+
+      col2Expr = new ExprNodeConstantDesc(typeInfo2, scalar2Object);
+    }
+
+    List<ObjectInspector> objectInspectorList = new ArrayList<ObjectInspector>();
+    objectInspectorList.add(VectorRandomRowSource.getObjectInspector(typeInfo1));
+    objectInspectorList.add(VectorRandomRowSource.getObjectInspector(typeInfo2));
+
+    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
+    children.add(col1Expr);
+    children.add(col2Expr);
+
+    //----------------------------------------------------------------------------------------------
+
+    String[] columnNames = columns.toArray(new String[0]);
+
+    VectorRandomRowSource rowSource = new VectorRandomRowSource();
+
+    rowSource.initGenerationSpecSchema(
+        random, generationSpecList, /* maxComplexDepth */ 0, /* allowNull */ true,
+        explicitDataTypePhysicalVariationList);
+
+    Object[][] randomRows = rowSource.randomRows(100000);
+
+    VectorRandomBatchSource batchSource =
+        VectorRandomBatchSource.createInterestingBatches(
+            random,
+            rowSource,
+            randomRows,
+            null);
+
+    GenericUDF genericUdf;
+    switch (comparison) {
+    case EQUALS:
+      genericUdf = new GenericUDFOPEqual();
+      break;
+    case LESS_THAN:
+      genericUdf = new GenericUDFOPLessThan();
+      break;
+    case LESS_THAN_EQUAL:
+      genericUdf = new GenericUDFOPEqualOrLessThan();
+      break;
+    case GREATER_THAN:
+      genericUdf = new GenericUDFOPGreaterThan();
+      break;
+    case GREATER_THAN_EQUAL:
+      genericUdf = new GenericUDFOPEqualOrGreaterThan();
+      break;
+    case NOT_EQUALS:
+      genericUdf = new GenericUDFOPNotEqual();
+      break;
+    default:
+      throw new RuntimeException("Unexpected arithmetic " + comparison);
+    }
+
+    ObjectInspector[] objectInspectors =
+        objectInspectorList.toArray(new ObjectInspector[objectInspectorList.size()]);
+    ObjectInspector outputObjectInspector = null;
+    try {
+      outputObjectInspector = genericUdf.initialize(objectInspectors);
+    } catch (Exception e) {
+      Assert.fail(e.toString());
+    }
+
+    TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
+
+    ExprNodeGenericFuncDesc exprDesc =
+        new ExprNodeGenericFuncDesc(outputTypeInfo, genericUdf, children);
+
+    final int rowCount = randomRows.length;
+    Object[][] resultObjectsArray = new Object[FilterCompareTestMode.count][];
+    for (int i = 0; i < FilterCompareTestMode.count; i++) {
+
+      Object[] resultObjects = new Object[rowCount];
+      resultObjectsArray[i] = resultObjects;
+
+      FilterCompareTestMode filterCompareTestMode = FilterCompareTestMode.values()[i];
+      switch (filterCompareTestMode) {
+      case ROW_MODE:
+        doRowFilterCompareTest(
+            typeInfo1,
+            typeInfo2,
+            columns,
+            children,
+            exprDesc,
+            comparison,
+            randomRows,
+            columnScalarMode,
+            rowSource.rowStructObjectInspector(),
+            outputTypeInfo,
+            resultObjects);
+        break;
+      case ADAPTOR:
+      case FILTER_VECTOR_EXPRESSION:
+      case COMPARE_VECTOR_EXPRESSION:
+        doVectorFilterCompareTest(
+            typeInfo1,
+            typeInfo2,
+            columns,
+            columnNames,
+            rowSource.typeInfos(),
+            rowSource.dataTypePhysicalVariations(),
+            children,
+            exprDesc,
+            comparison,
+            filterCompareTestMode,
+            columnScalarMode,
+            batchSource,
+            exprDesc.getWritableObjectInspector(),
+            outputTypeInfo,
+            resultObjects);
+        break;
+      default:
+        throw new RuntimeException("Unexpected IF statement test mode " + filterCompareTestMode);
+      }
+    }
+
+    for (int i = 0; i < rowCount; i++) {
+      // Row-mode is the expected value.
+      Object expectedResult = resultObjectsArray[0][i];
+
+      for (int v = 1; v < FilterCompareTestMode.count; v++) {
+        FilterCompareTestMode filterCompareTestMode = FilterCompareTestMode.values()[v];
+        Object vectorResult = resultObjectsArray[v][i];
+        if (filterCompareTestMode == FilterCompareTestMode.FILTER_VECTOR_EXPRESSION &&
+            expectedResult == null &&
+            vectorResult != null) {
+          // This can be OK: a filter cannot represent NULL, so a row-mode NULL
+          // may legitimately show up here as a filtered-out row (false); only
+          // a kept row (true) is a genuine mismatch, which is checked below.
+          boolean vectorBoolean = ((BooleanWritable) vectorResult).get();
+          if (vectorBoolean) {
+            Assert.fail(
+                "Row " + i +
+                " typeName1 " + typeName1 +
+                " typeName2 " + typeName2 +
+                " outputTypeName " + outputTypeInfo.getTypeName() +
+                " " + comparison +
+                " " + filterCompareTestMode +
+                " " + columnScalarMode +
+                " result is NOT NULL and true" +
+                " does not match row-mode expected result is NULL which means false here" +
+                (columnScalarMode == ColumnScalarMode.SCALAR_COLUMN ?
+                    " scalar1 " + scalar1Object.toString() : "") +
+                " row values " + Arrays.toString(randomRows[i]) +
+                (columnScalarMode == ColumnScalarMode.COLUMN_SCALAR ?
+                    " scalar2 " + scalar2Object.toString() : ""));
+          }
+        } else if (expectedResult == null || vectorResult == null) {
+          if (expectedResult != null || vectorResult != null) {
+            Assert.fail(
+                "Row " + i +
+                " typeName1 " + typeName1 +
+                " typeName2 " + typeName2 +
+                " outputTypeName " + outputTypeInfo.getTypeName() +
+                " " + comparison +
+                " " + filterCompareTestMode +
+                " " + columnScalarMode +
+                " result is NULL " + (vectorResult == null) +
+                " does not match row-mode expected result is NULL " + (expectedResult == null) +
+                (columnScalarMode == ColumnScalarMode.SCALAR_COLUMN ?
+                    " scalar1 " + scalar1Object.toString() : "") +
+                " row values " + Arrays.toString(randomRows[i]) +
+                (columnScalarMode == ColumnScalarMode.COLUMN_SCALAR ?
+                    " scalar2 " + scalar2Object.toString() : ""));
+          }
+        } else {
+
+          if (!expectedResult.equals(vectorResult)) {
+            Assert.fail(
+                "Row " + i +
+                " typeName1 " + typeName1 +
+                " typeName2 " + typeName2 +
+                " outputTypeName " + outputTypeInfo.getTypeName() +
+                " " + comparison +
+                " " + filterCompareTestMode +
+                " " + columnScalarMode +
+                " result " + vectorResult.toString() +
+                " (" + vectorResult.getClass().getSimpleName() + ")" +
+                " does not match row-mode expected result " + expectedResult.toString() +
+                " (" + expectedResult.getClass().getSimpleName() + ")" +
+                (columnScalarMode == ColumnScalarMode.SCALAR_COLUMN ?
+                    " scalar1 " + scalar1Object.toString() : "") +
+                " row values " + Arrays.toString(randomRows[i]) +
+                (columnScalarMode == ColumnScalarMode.COLUMN_SCALAR ?
+                    " scalar2 " + scalar2Object.toString() : ""));
+          }
+        }
+      }
+    }
+  }
+
+  private void doRowFilterCompareTest(TypeInfo typeInfo1,
+      TypeInfo typeInfo2,
+      List<String> columns, List<ExprNodeDesc> children,
+      ExprNodeGenericFuncDesc exprDesc,
+      Comparison comparison,
+      Object[][] randomRows, ColumnScalarMode columnScalarMode,
+      ObjectInspector rowInspector,
+      TypeInfo outputTypeInfo, Object[] resultObjects) throws Exception {
+
+    /*
+    System.out.println(
+        "*DEBUG* typeInfo " + typeInfo1.toString() +
+        " typeInfo2 " + typeInfo2 +
+        " filterCompareTestMode ROW_MODE" +
+        " columnScalarMode " + columnScalarMode +
+        " exprDesc " + exprDesc.toString());
+    */
+
+    HiveConf hiveConf = new HiveConf();
+    ExprNodeEvaluator evaluator =
+        ExprNodeEvaluatorFactory.get(exprDesc, hiveConf);
+    evaluator.initialize(rowInspector);
+
+    ObjectInspector objectInspector =
+        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
+            outputTypeInfo);
+
+    final int rowCount = randomRows.length;
+    for (int i = 0; i < rowCount; i++) {
+      Object[] row = randomRows[i];
+      Object result = evaluator.evaluate(row);
+      Object copyResult = null;
+      try {
+        copyResult =
+            ObjectInspectorUtils.copyToStandardObject(
+                result, objectInspector, ObjectInspectorCopyOption.WRITABLE);
+      } catch (Exception e) {
+        Assert.fail(e.toString());
+      }
+      resultObjects[i] = copyResult;
+    }
+  }
+
+  private void extractResultObjects(VectorizedRowBatch batch, int rowIndex,
+      VectorExtractRow resultVectorExtractRow, Object[] scratchRow,
+      ObjectInspector objectInspector, Object[] resultObjects) {
+
+    boolean selectedInUse = batch.selectedInUse;
+    int[] selected = batch.selected;
+    for (int logicalIndex = 0; logicalIndex < batch.size; logicalIndex++) {
+      final int batchIndex = (selectedInUse ? selected[logicalIndex] : logicalIndex);
+      resultVectorExtractRow.extractRow(batch, batchIndex, scratchRow);
+
+      Object copyResult =
+          ObjectInspectorUtils.copyToStandardObject(
+              scratchRow[0], objectInspector, ObjectInspectorCopyOption.WRITABLE);
+      resultObjects[rowIndex++] = copyResult;
+    }
+  }
+
+  private void doVectorFilterCompareTest(TypeInfo typeInfo1,
+      TypeInfo typeInfo2,
+      List<String> columns,
+      String[] columnNames,
+      TypeInfo[] typeInfos, DataTypePhysicalVariation[] dataTypePhysicalVariations,
+      List<ExprNodeDesc> children,
+      ExprNodeGenericFuncDesc exprDesc,
+      Comparison comparison,
+      FilterCompareTestMode filterCompareTestMode, ColumnScalarMode columnScalarMode,
+      VectorRandomBatchSource batchSource,
+      ObjectInspector objectInspector,
+      TypeInfo outputTypeInfo, Object[] resultObjects)
+          throws Exception {
+
+    HiveConf hiveConf = new HiveConf();
+    if (filterCompareTestMode == FilterCompareTestMode.ADAPTOR) {
+      hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
+
+      // Don't use DECIMAL_64 with the VectorUDFAdaptor.
+      dataTypePhysicalVariations = null;
+    }
+
+    VectorizationContext vectorizationContext =
+        new VectorizationContext(
+            "name",
+            columns,
+            Arrays.asList(typeInfos),
+            dataTypePhysicalVariations == null ? null : Arrays.asList(dataTypePhysicalVariations),
+            hiveConf);
+    final VectorExpressionDescriptor.Mode mode;
+    switch (filterCompareTestMode) {
+    case ADAPTOR:
+    case COMPARE_VECTOR_EXPRESSION:
+      mode = VectorExpressionDescriptor.Mode.PROJECTION;
+      break;
+    case FILTER_VECTOR_EXPRESSION:
+      mode = VectorExpressionDescriptor.Mode.FILTER;
+      break;
+    default:
+      throw new RuntimeException("Unexpected filter compare mode " + filterCompareTestMode);
+    }
+    VectorExpression vectorExpression =
+        vectorizationContext.getVectorExpression(
+            exprDesc, mode);
+    vectorExpression.transientInit();
+
+    if (filterCompareTestMode == FilterCompareTestMode.COMPARE_VECTOR_EXPRESSION &&
+        vectorExpression instanceof VectorUDFAdaptor) {
+      System.out.println(
+          "*NO NATIVE VECTOR EXPRESSION* typeInfo1 " + typeInfo1.toString() +
+          " typeInfo2 " + typeInfo2.toString() +
+          " " + comparison + " " +
+          " filterCompareTestMode " + filterCompareTestMode +
+          " columnScalarMode " + columnScalarMode +
+          " vectorExpression " + vectorExpression.toString());
+    }
+
+    String[] outputScratchTypeNames = vectorizationContext.getScratchColumnTypeNames();
+    DataTypePhysicalVariation[] outputDataTypePhysicalVariations =
+        vectorizationContext.getScratchDataTypePhysicalVariations();
+
+    VectorizedRowBatchCtx batchContext =
+        new VectorizedRowBatchCtx(
+            columnNames,
+            typeInfos,
+            dataTypePhysicalVariations,
+            /* dataColumnNums */ null,
+            /* partitionColumnCount */ 0,
+            /* virtualColumnCount */ 0,
+            /* neededVirtualColumns */ null,
+            outputScratchTypeNames,
+            outputDataTypePhysicalVariations);
+
+    VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
+
+    VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
+    final int outputColumnNum = vectorExpression.getOutputColumnNum();
+    resultVectorExtractRow.init(
+        new TypeInfo[] { outputTypeInfo }, new int[] { outputColumnNum });
+    Object[] scratchRow = new Object[1];
+
+    // System.out.println("*VECTOR EXPRESSION* " + vectorExpression.getClass().getSimpleName());
+
+    /*
+    System.out.println(
+        "*DEBUG* typeInfo1 " + typeInfo1.toString() +
+        " typeInfo2 " + typeInfo2.toString() +
+        " " + comparison + " " +
+        " filterCompareTestMode " + filterCompareTestMode +
+        " columnScalarMode " + columnScalarMode +
+        " vectorExpression " + vectorExpression.toString());
+    */
+
+    final boolean isFilter = (mode == VectorExpressionDescriptor.Mode.FILTER);
+    boolean copySelectedInUse = false;
+    int[] copySelected = new int[VectorizedRowBatch.DEFAULT_SIZE];
+
+    batchSource.resetBatchIteration();
+    int rowIndex = 0;
+    while (true) {
+      if (!batchSource.fillNextBatch(batch)) {
+        break;
+      }
+      final int originalBatchSize = batch.size;
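+      // A filter expression compacts batch.selected in place, keeping only the
+      // rows that pass; snapshot the current selection first so each original
+      // row can be mapped back to a true/false result after evaluate().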
+      if (isFilter) {
+        copySelectedInUse = batch.selectedInUse;
+        if (batch.selectedInUse) {
+          System.arraycopy(batch.selected, 0, copySelected, 0, originalBatchSize);
+        }
+      }
+
+      // In filter mode, the batch size can be made smaller.
+      vectorExpression.evaluate(batch);
+
+      if (!isFilter) {
+        extractResultObjects(batch, rowIndex, resultVectorExtractRow, scratchRow,
+            objectInspector, resultObjects);
+      } else {
+        final int currentBatchSize = batch.size;
+        if (copySelectedInUse && batch.selectedInUse) {
+          int selectIndex = 0;
+          for (int i = 0; i < originalBatchSize; i++) {
+            final int originalBatchIndex = copySelected[i];
+            final boolean booleanResult;
+            if (selectIndex < currentBatchSize && batch.selected[selectIndex] == originalBatchIndex) {
+              booleanResult = true;
+              selectIndex++;
+            } else {
+              booleanResult = false;
+            }
+            resultObjects[rowIndex + i] = new BooleanWritable(booleanResult);
+          }
+        } else if (batch.selectedInUse) {
+          int selectIndex = 0;
+          for (int i = 0; i < originalBatchSize; i++) {
+            final boolean booleanResult;
+            if (selectIndex < currentBatchSize && batch.selected[selectIndex] == i) {
+              booleanResult = true;
+              selectIndex++;
+            } else {
+              booleanResult = false;
+            }
+            resultObjects[rowIndex + i] = new BooleanWritable(booleanResult);
+          }
+        } else if (currentBatchSize == 0) {
+          // Whole batch got zapped.
+          for (int i = 0; i < originalBatchSize; i++) {
+            resultObjects[rowIndex + i] = new BooleanWritable(false);
+          }
+        } else {
+          // Every row kept.
+          for (int i = 0; i < originalBatchSize; i++) {
+            resultObjects[rowIndex + i] = new BooleanWritable(true);
+          }
+        }
+      }
+      rowIndex += originalBatchSize;
+    }
+  }
+}

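For readers of the filter branch in doVectorFilterCompareTest above: since a
FILTER expression rewrites batch.selected down to the surviving rows, the test
recovers a boolean per original row with one forward merge of the before/after
index arrays (both ascending). Distilled as a standalone sketch (illustrative,
not part of the commit):

    public class SelectedToBooleans {

      // originalSelected / survivingSelected hold ascending row indexes before
      // and after the filter ran; one pass recovers a boolean per original row.
      static boolean[] selectedToBooleans(int[] originalSelected,
          int[] survivingSelected) {
        boolean[] results = new boolean[originalSelected.length];
        int s = 0;
        for (int i = 0; i < originalSelected.length; i++) {
          if (s < survivingSelected.length
              && survivingSelected[s] == originalSelected[i]) {
            results[i] = true;  // row survived the filter
            s++;
          }                     // else: filtered out, stays false
        }
        return results;
      }

      public static void main(String[] args) {
        int[] before = {0, 3, 5, 9};
        int[] after = {3, 9};
        System.out.println(java.util.Arrays.toString(
            selectedToBooleans(before, after)));  // [false, true, false, true]
      }
    }
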
http://git-wip-us.apache.org/repos/asf/hive/blob/71c49878/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorIfStatement.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorIfStatement.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorIfStatement.java
index 9d78ca6..58e32ca 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorIfStatement.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorIfStatement.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -155,6 +156,20 @@ public class TestVectorIfStatement {
     doIfTests(random, "decimal(10,4)");
   }
 
+  @Test
+  public void testDecimal64() throws Exception {
+    Random random = new Random(12882);
+
+    doIfTestsWithDiffColumnScalar(
+        random, "decimal(10,4)", ColumnScalarMode.COLUMN_COLUMN, DataTypePhysicalVariation.DECIMAL_64, false, false);
+    doIfTestsWithDiffColumnScalar(
+        random, "decimal(10,4)", ColumnScalarMode.COLUMN_SCALAR, DataTypePhysicalVariation.DECIMAL_64, false, false);
+    doIfTestsWithDiffColumnScalar(
+        random, "decimal(10,4)", ColumnScalarMode.SCALAR_COLUMN, DataTypePhysicalVariation.DECIMAL_64, false, false);
+    doIfTestsWithDiffColumnScalar(
+        random, "decimal(10,4)", ColumnScalarMode.SCALAR_SCALAR, DataTypePhysicalVariation.DECIMAL_64, false, false);
+  }
+
   public enum IfStmtTestMode {
     ROW_MODE,
     ADAPTOR_WHEN,
@@ -280,22 +295,6 @@ public class TestVectorIfStatement {
 
     String[] columnNames = columns.toArray(new String[0]);
 
-    String[] outputScratchTypeNames = new String[] { typeName };
-    DataTypePhysicalVariation[] outputDataTypePhysicalVariations =
-        new DataTypePhysicalVariation[] { dataTypePhysicalVariation };
-
-    VectorizedRowBatchCtx batchContext =
-        new VectorizedRowBatchCtx(
-            columnNames,
-            rowSource.typeInfos(),
-            rowSource.dataTypePhysicalVariations(),
-            /* dataColumnNums */ null,
-            /* partitionColumnCount */ 0,
-            /* virtualColumnCount */ 0,
-            /* neededVirtualColumns */ null,
-            outputScratchTypeNames,
-            outputDataTypePhysicalVariations);
-
     Object[][] randomRows = rowSource.randomRows(100000);
 
     VectorRandomBatchSource batchSource =
@@ -324,13 +323,13 @@ public class TestVectorIfStatement {
         doVectorIfTest(
             typeInfo,
             columns,
+            columnNames,
             rowSource.typeInfos(),
             rowSource.dataTypePhysicalVariations(),
             children,
             ifStmtTestMode,
             columnScalarMode,
             batchSource,
-            batchContext,
             resultObjects);
         break;
       default:
@@ -354,14 +353,6 @@ public class TestVectorIfStatement {
           }
         } else {
 
-          if (isDecimal64 && expectedResult instanceof LongWritable) {
-
-            HiveDecimalWritable expectedHiveDecimalWritable = new HiveDecimalWritable(0);
-            expectedHiveDecimalWritable.deserialize64(
-                ((LongWritable) expectedResult).get(), decimal64Scale);
-            expectedResult = expectedHiveDecimalWritable;
-          }
-
           if (!expectedResult.equals(vectorResult)) {
             Assert.fail(
                 "Row " + i + " " + IfStmtTestMode.values()[v] +
@@ -418,10 +409,11 @@ public class TestVectorIfStatement {
 
   private void doVectorIfTest(TypeInfo typeInfo,
       List<String> columns,
+      String[] columnNames,
       TypeInfo[] typeInfos, DataTypePhysicalVariation[] dataTypePhysicalVariations,
       List<ExprNodeDesc> children,
       IfStmtTestMode ifStmtTestMode, ColumnScalarMode columnScalarMode,
-      VectorRandomBatchSource batchSource, VectorizedRowBatchCtx batchContext,
+      VectorRandomBatchSource batchSource,
       Object[] resultObjects)
           throws Exception {
 
@@ -453,10 +445,36 @@ public class TestVectorIfStatement {
             hiveConf);
     VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
 
+    if (ifStmtTestMode == IfStmtTestMode.VECTOR_EXPRESSION &&
+        vectorExpression instanceof VectorUDFAdaptor) {
+      System.out.println(
+          "*NO NATIVE VECTOR EXPRESSION* typeInfo " + typeInfo.toString() +
+          " ifStmtTestMode " + ifStmtTestMode +
+          " columnScalarMode " + columnScalarMode +
+          " vectorExpression " + vectorExpression.toString());
+    }
+
+    String[] outputScratchTypeNames = vectorizationContext.getScratchColumnTypeNames();
+    DataTypePhysicalVariation[] outputDataTypePhysicalVariations =
+        vectorizationContext.getScratchDataTypePhysicalVariations();
+
+    VectorizedRowBatchCtx batchContext =
+        new VectorizedRowBatchCtx(
+            columnNames,
+            typeInfos,
+            dataTypePhysicalVariations,
+            /* dataColumnNums */ null,
+            /* partitionColumnCount */ 0,
+            /* virtualColumnCount */ 0,
+            /* neededVirtualColumns */ null,
+            outputScratchTypeNames,
+            outputDataTypePhysicalVariations);
+
     VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
 
     VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
-    resultVectorExtractRow.init(new TypeInfo[] { typeInfo }, new int[] { columns.size() });
+    resultVectorExtractRow.init(
+        new TypeInfo[] { typeInfo }, new int[] { vectorExpression.getOutputColumnNum() });
     Object[] scrqtchRow = new Object[1];
 
     // System.out.println("*VECTOR EXPRESSION* " + vectorExpression.getClass().getSimpleName());
@@ -466,7 +484,7 @@ public class TestVectorIfStatement {
         "*DEBUG* typeInfo " + typeInfo.toString() +
         " ifStmtTestMode " + ifStmtTestMode +
         " columnScalarMode " + columnScalarMode +
-        " vectorExpression " + vectorExpression.getClass().getSimpleName());
+        " vectorExpression " + vectorExpression.toString());
     */
 
     batchSource.resetBatchIteration();

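The TestVectorIfStatement change above reorders test setup so the
VectorizedRowBatchCtx is derived from the VectorizationContext's scratch columns
rather than a hand-built list. Roughly, using only constructors and calls that
appear in the diff (a sketch of the ordering, not a drop-in snippet):

    // Vectorize first; scratch columns exist only after getVectorExpression().
    VectorizationContext vContext = new VectorizationContext(
        "name", columns, Arrays.asList(typeInfos),
        Arrays.asList(dataTypePhysicalVariations), hiveConf);
    VectorExpression expr = vContext.getVectorExpression(exprDesc);

    // Build the batch context afterwards, from the context's scratch columns,
    // and take the output column number from the expression itself.
    VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(
        columnNames, typeInfos, dataTypePhysicalVariations,
        /* dataColumnNums */ null, /* partitionColumnCount */ 0,
        /* virtualColumnCount */ 0, /* neededVirtualColumns */ null,
        vContext.getScratchColumnTypeNames(),
        vContext.getScratchDataTypePhysicalVariations());
    VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();

    VectorExtractRow extractRow = new VectorExtractRow();
    extractRow.init(new TypeInfo[] { typeInfo },
        new int[] { expr.getOutputColumnNum() });
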
http://git-wip-us.apache.org/repos/asf/hive/blob/71c49878/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorNegative.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorNegative.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorNegative.java
index 289efb9..54c085b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorNegative.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorNegative.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
 import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
@@ -406,6 +407,14 @@ public class TestVectorNegative {
     VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
     vectorExpression.transientInit();
 
+    if (negativeTestMode == NegativeTestMode.VECTOR_EXPRESSION &&
+        vectorExpression instanceof VectorUDFAdaptor) {
+      System.out.println(
+          "*NO NATIVE VECTOR EXPRESSION* typeInfo " + typeInfo.toString() +
+          " negativeTestMode " + negativeTestMode +
+          " vectorExpression " + vectorExpression.toString());
+    }
+
     String[] outputScratchTypeNames= vectorizationContext.getScratchColumnTypeNames();
 
     VectorizedRowBatchCtx batchContext =

http://git-wip-us.apache.org/repos/asf/hive/blob/71c49878/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringConcat.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringConcat.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringConcat.java
index 3d030e6..69fd70c 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringConcat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringConcat.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
 import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
@@ -400,6 +401,16 @@ public class TestVectorStringConcat {
     VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
     vectorExpression.transientInit();
 
+    if (stringConcatTestMode == StringConcatTestMode.VECTOR_EXPRESSION &&
+        vectorExpression instanceof VectorUDFAdaptor) {
+      System.out.println(
+          "*NO NATIVE VECTOR EXPRESSION* stringTypeInfo1 " + stringTypeInfo1.toString() +
+          " stringTypeInfo2 " + stringTypeInfo2.toString() +
+          " stringConcatTestMode " + stringConcatTestMode +
+          " columnScalarMode " + columnScalarMode +
+          " vectorExpression " + vectorExpression.toString());
+    }
+
     VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
 
     VectorExtractRow resultVectorExtractRow = new VectorExtractRow();

http://git-wip-us.apache.org/repos/asf/hive/blob/71c49878/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringUnary.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringUnary.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringUnary.java
index 4398554..f029358 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringUnary.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringUnary.java
@@ -38,7 +38,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
 import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec;
 import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.StringGenerationOption;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.TestVectorTimestampExtract.TimestampExtractTestMode;
+import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
@@ -104,8 +104,6 @@ public class TestVectorStringUnary {
       doTests(random, typeName, "trim");
 
       doTests(random, typeName, "initcap");
-
-      doTests(random, typeName, "hex");
     }
 
     doTests(random, typeName, "lower");
@@ -341,6 +339,14 @@ public class TestVectorStringUnary {
             hiveConf);
     VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
 
+    if (stringUnaryTestMode == StringUnaryTestMode.VECTOR_EXPRESSION &&
+        vectorExpression instanceof VectorUDFAdaptor) {
+      System.out.println(
+          "*NO NATIVE VECTOR EXPRESSION* typeInfo " + typeInfo.toString() +
+          " stringUnaryTestMode " + stringUnaryTestMode +
+          " vectorExpression " + vectorExpression.toString());
+    }
+
     VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
 
     VectorExtractRow resultVectorExtractRow = new VectorExtractRow();

http://git-wip-us.apache.org/repos/asf/hive/blob/71c49878/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorSubStr.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorSubStr.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorSubStr.java
index 62d296d..694f6f7 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorSubStr.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorSubStr.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
 import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec;
 import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.StringGenerationOption;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
@@ -320,6 +321,14 @@ public class TestVectorSubStr {
             hiveConf);
     VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
 
+    if (subStrTestMode == SubStrTestMode.VECTOR_EXPRESSION &&
+        vectorExpression instanceof VectorUDFAdaptor) {
+      System.out.println(
+          "*NO NATIVE VECTOR EXPRESSION* typeInfo " + typeInfo.toString() +
+          " subStrTestMode " + subStrTestMode +
+          " vectorExpression " + vectorExpression.toString());
+    }
+
     VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
 
     VectorExtractRow resultVectorExtractRow = new VectorExtractRow();

http://git-wip-us.apache.org/repos/asf/hive/blob/71c49878/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExtract.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExtract.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExtract.java
index 3220f4c..5d5e4c9 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExtract.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExtract.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow;
 import org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource;
 import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource;
 import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec;
+import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
@@ -395,6 +396,14 @@ public class TestVectorTimestampExtract {
     VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
     vectorExpression.transientInit();
 
+    if (timestampExtractTestMode == TimestampExtractTestMode.VECTOR_EXPRESSION &&
+        vectorExpression instanceof VectorUDFAdaptor) {
+      System.out.println(
+          "*NO NATIVE VECTOR EXPRESSION* dateTimeStringTypeInfo " + dateTimeStringTypeInfo.toString() +
+          " timestampExtractTestMode " + timestampExtractTestMode +
+          " vectorExpression " + vectorExpression.toString());
+    }
+
     // System.out.println("*VECTOR EXPRESSION* " + vectorExpression.getClass().getSimpleName());
 
     /*

http://git-wip-us.apache.org/repos/asf/hive/blob/71c49878/ql/src/test/results/clientpositive/convert_decimal64_to_decimal.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/convert_decimal64_to_decimal.q.out b/ql/src/test/results/clientpositive/convert_decimal64_to_decimal.q.out
index 8e538d2..5189957 100644
--- a/ql/src/test/results/clientpositive/convert_decimal64_to_decimal.q.out
+++ b/ql/src/test/results/clientpositive/convert_decimal64_to_decimal.q.out
@@ -181,7 +181,7 @@ STAGE PLANS:
               Filter Vectorization:
                   className: VectorFilterOperator
                   native: true
-                  predicateExpression: SelectColumnIsNotNull(col 33:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 19:decimal(4,2)/DECIMAL_64) -> 33:decimal(4,2))
+                  predicateExpression: SelectColumnIsNotNull(col 19:decimal(4,2)/DECIMAL_64)
               predicate: decimal0402_col_20 is not null (type: boolean)
               Statistics: Num rows: 1000 Data size: 2087309 Basic stats: COMPLETE Column stats: NONE
               Select Operator
@@ -199,7 +199,7 @@ STAGE PLANS:
                     0 _col0 (type: decimal(9,2))
                     1 _col1 (type: decimal(9,2))
                   Map Join Vectorization:
-                      bigTableKeyExpressions: ConvertDecimal64ToDecimal(col 19:decimal(9,2)/DECIMAL_64) -> 34:decimal(9,2)
+                      bigTableKeyExpressions: ConvertDecimal64ToDecimal(col 19:decimal(9,2)/DECIMAL_64) -> 33:decimal(9,2)
                       bigTableValueExpressions: col 14:smallint
                       className: VectorMapJoinOperator
                       native: false
@@ -244,7 +244,7 @@ STAGE PLANS:
               includeColumns: [14, 19]
               dataColumns: tinyint_col_1:tinyint, float_col_2:float, bigint_col_3:bigint, boolean_col_4:boolean, decimal0202_col_5:decimal(2,2)/DECIMAL_64, decimal1612_col_6:decimal(16,12)/DECIMAL_64, double_col_7:double, char0205_col_8:char(205), bigint_col_9:bigint, decimal1202_col_10:decimal(12,2)/DECIMAL_64, boolean_col_11:boolean, double_col_12:double, decimal2208_col_13:decimal(22,8), decimal3722_col_14:decimal(37,22), smallint_col_15:smallint, decimal2824_col_16:decimal(28,24), boolean_col_17:boolean, float_col_18:float, timestamp_col_19:timestamp, decimal0402_col_20:decimal(4,2)/DECIMAL_64, char0208_col_21:char(208), char0077_col_22:char(77), decimal2915_col_23:decimal(29,15), char0234_col_24:char(234), timestamp_col_25:timestamp, tinyint_col_26:tinyint, decimal3635_col_27:decimal(36,35), boolean_col_28:boolean, float_col_29:float, smallint_col_30:smallint, varchar0200_col_31:varchar(200), boolean_col_32:boolean
               partitionColumnCount: 0
-              scratchColumnTypeNames: [decimal(4,2), decimal(9,2)]
+              scratchColumnTypeNames: [decimal(9,2)]
       Local Work:
         Map Reduce Local Work
       Reduce Vectorization:

http://git-wip-us.apache.org/repos/asf/hive/blob/71c49878/ql/src/test/results/clientpositive/llap/convert_decimal64_to_decimal.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/convert_decimal64_to_decimal.q.out b/ql/src/test/results/clientpositive/llap/convert_decimal64_to_decimal.q.out
index cbc6b25..fddd2cb 100644
--- a/ql/src/test/results/clientpositive/llap/convert_decimal64_to_decimal.q.out
+++ b/ql/src/test/results/clientpositive/llap/convert_decimal64_to_decimal.q.out
@@ -163,7 +163,7 @@ STAGE PLANS:
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: SelectColumnIsNotNull(col 20:decimal(8,1))(children: ConvertDecimal64ToDecimal(col 3:decimal(8,1)/DECIMAL_64) -> 20:decimal(8,1))
+                        predicateExpression: SelectColumnIsNotNull(col 3:decimal(8,1)/DECIMAL_64)
                     predicate: decimal0801_col_4 is not null (type: boolean)
                     Statistics: Num rows: 1026 Data size: 109272 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
@@ -181,7 +181,7 @@ STAGE PLANS:
                           0 _col0 (type: decimal(9,2))
                           1 _col1 (type: decimal(9,2))
                         Map Join Vectorization:
-                            bigTableKeyExpressions: ConvertDecimal64ToDecimal(col 3:decimal(9,2)/DECIMAL_64) -> 21:decimal(9,2)
+                            bigTableKeyExpressions: ConvertDecimal64ToDecimal(col 3:decimal(9,2)/DECIMAL_64) -> 20:decimal(9,2)
                             className: VectorMapJoinOperator
                             native: false
                             nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true
@@ -229,7 +229,7 @@ STAGE PLANS:
                     includeColumns: [3]
                     dataColumns: float_col_1:float, varchar0037_col_2:varchar(37), decimal2912_col_3:decimal(29,12), decimal0801_col_4:decimal(8,1)/DECIMAL_64, timestamp_col_5:timestamp, boolean_col_6:boolean, string_col_7:string, tinyint_col_8:tinyint, boolean_col_9:boolean, decimal1614_col_10:decimal(16,14)/DECIMAL_64, boolean_col_11:boolean, float_col_12:float, char0116_col_13:char(116), boolean_col_14:boolean, string_col_15:string, double_col_16:double, string_col_17:string, bigint_col_18:bigint, int_col_19:int
                     partitionColumnCount: 0
-                    scratchColumnTypeNames: [decimal(8,1), decimal(9,2), bigint]
+                    scratchColumnTypeNames: [decimal(9,2), bigint]
         Map 3 
             Map Operator Tree:
                 TableScan
@@ -243,7 +243,7 @@ STAGE PLANS:
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: SelectColumnIsNotNull(col 33:decimal(4,2))(children: ConvertDecimal64ToDecimal(col 19:decimal(4,2)/DECIMAL_64) -> 33:decimal(4,2))
+                        predicateExpression: SelectColumnIsNotNull(col 19:decimal(4,2)/DECIMAL_64)
                     predicate: decimal0402_col_20 is not null (type: boolean)
                     Statistics: Num rows: 950 Data size: 104800 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
@@ -282,7 +282,7 @@ STAGE PLANS:
                     includeColumns: [14, 19]
                     dataColumns: tinyint_col_1:tinyint, float_col_2:float, bigint_col_3:bigint, boolean_col_4:boolean, decimal0202_col_5:decimal(2,2)/DECIMAL_64, decimal1612_col_6:decimal(16,12)/DECIMAL_64, double_col_7:double, char0205_col_8:char(205), bigint_col_9:bigint, decimal1202_col_10:decimal(12,2)/DECIMAL_64, boolean_col_11:boolean, double_col_12:double, decimal2208_col_13:decimal(22,8), decimal3722_col_14:decimal(37,22), smallint_col_15:smallint, decimal2824_col_16:decimal(28,24), boolean_col_17:boolean, float_col_18:float, timestamp_col_19:timestamp, decimal0402_col_20:decimal(4,2)/DECIMAL_64, char0208_col_21:char(208), char0077_col_22:char(77), decimal2915_col_23:decimal(29,15), char0234_col_24:char(234), timestamp_col_25:timestamp, tinyint_col_26:tinyint, decimal3635_col_27:decimal(36,35), boolean_col_28:boolean, float_col_29:float, smallint_col_30:smallint, varchar0200_col_31:varchar(200), boolean_col_32:boolean
                     partitionColumnCount: 0
-                    scratchColumnTypeNames: [decimal(4,2)]
+                    scratchColumnTypeNames: []
         Reducer 2 
             Execution mode: vectorized, llap
             Reduce Vectorization:

http://git-wip-us.apache.org/repos/asf/hive/blob/71c49878/ql/src/test/results/clientpositive/llap/vector_case_when_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_case_when_1.q.out b/ql/src/test/results/clientpositive/llap/vector_case_when_1.q.out
index b531d79..3ab6547 100644
--- a/ql/src/test/results/clientpositive/llap/vector_case_when_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_case_when_1.q.out
@@ -523,8 +523,8 @@ STAGE PLANS:
                     Select Vectorization:
                         className: VectorSelectOperator
                         native: true
-                        projectedOutputColumnNums: [4, 22, 24, 25, 26, 27, 28, 30, 31, 32, 33, 34, 35, 38, 40, 43, 44]
-                        selectExpressions: IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 21:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 22:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 21:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprStringScalarStringScalar(col 20:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean) -> 21:string) -> 22:string) -> 21:string) -> 22:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 24:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprColumnNull(col 20:boolean, col 21:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean, ConstantVectorExpression(val Many) -> 21:string) -> 23:string) -> 24:string) -> 23:string) -> 24:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprNullNull(null, null) -> 23:string) -> 25:string) -> 23:string) -> 25:string, IfExprLongColumnLongColumn(col 17:boolean, col 18:date, col 19:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 17:boolean, VectorUDFDateAddColScalar(col 10:date, val 10) -> 18:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 19:date) -> 26:date, IfExprDoubleColumnLongScalar(col 17:boolean, col 28:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 27:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 27:double) -> 28:double) -> 27:double, IfExprDoubleColumnDoubleScalar(col 17:boolean, col 29:double, val 0.0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 28:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 28:double) -> 29:double) -> 28:double, IfExprNullColumn(col 17:boolean, null, col 46)(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 17:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 46:decimal(10,2)) -> 30:decimal(10,2), IfExprColumnNull(col 18:boolean, col 47:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 18:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 47:decimal(10,2)) -> 31:decimal(10,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 32:decimal(12,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 33:decimal(12,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 34:decimal(10,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 35:decimal(10,2), IfExprTimestampColumnColumn(col 19:boolean, col 36:timestampcol 37:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 19:boolean, CastDateToTimestamp(col 12:date) -> 36:timestamp, CastDateToTimestamp(col 11:date) -> 37:timestamp) -> 38:timestamp, IfExprColumnNull(col 19:boolean, col 39:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 19:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 39:int) -> 40:int, IfExprNullColumn(col 41:boolean, null, col 42)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 41:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 42:int) -> 43:int, IfExprLongScalarLongScalar(col 45:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 44:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 44:int) -> 45:boolean) -> 44:date
+                        projectedOutputColumnNums: [4, 22, 24, 25, 26, 27, 28, 30, 31, 32, 33, 34, 36, 40, 42, 45, 46]
+                        selectExpressions: IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 21:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 22:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 21:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprStringScalarStringScalar(col 20:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean) -> 21:string) -> 22:string) -> 21:string) -> 22:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 24:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprColumnNull(col 20:boolean, col 21:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 20:boolean, ConstantVectorExpression(val Many) -> 21:string) -> 23:string) -> 24:string) -> 23:string) -> 24:string, IfExprStringScalarStringGroupColumn(col 17:boolean, val Singlecol 23:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, IfExprStringScalarStringGroupColumn(col 18:boolean, val Twocol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 18:boolean, IfExprStringScalarStringGroupColumn(col 19:boolean, val Somecol 23:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 19:boolean, IfExprNullNull(null, null) -> 23:string) -> 25:string) -> 23:string) -> 25:string, IfExprLongColumnLongColumn(col 17:boolean, col 18:date, col 19:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 17:boolean, VectorUDFDateAddColScalar(col 10:date, val 10) -> 18:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 19:date) -> 26:date, IfExprDoubleColumnLongScalar(col 17:boolean, col 28:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 27:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 27:double) -> 28:double) -> 27:double, IfExprDoubleColumnDoubleScalar(col 17:boolean, col 29:double, val 0.0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 17:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 28:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 28:double) -> 29:double) -> 28:double, IfExprNullColumn(col 17:boolean, null, col 48)(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 17:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 48:decimal(10,2)) -> 30:decimal(10,2), IfExprColumnNull(col 18:boolean, col 49:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 18:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 49:decimal(10,2)) -> 31:decimal(10,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 32:decimal(12,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 33:decimal(12,2), IfExprDecimal64ScalarDecimal64Column(col 19:boolean, decimal64Val 0, decimalVal 0, col 7:decimal(1,0)/DECIMAL_64)(children: StringGroupColEqualStringScalar(col 23:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 19:boolean) -> 34:decimal(10,2)/DECIMAL_64, IfExprDecimal64ColumnDecimal64Scalar(col 35:boolean, col 7:decimal(10,2)/DECIMAL_64, decimal64Val 0, decimalVal 0)(children: StringGroupColEqualStringScalar(col 23:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 23:string) -> 35:boolean) -> 36:decimal(10,2)/DECIMAL_64, IfExprTimestampColumnColumn(col 37:boolean, col 38:timestampcol 39:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 37:boolean, CastDateToTimestamp(col 12:date) -> 38:timestamp, CastDateToTimestamp(col 11:date) -> 39:timestamp) -> 40:timestamp, IfExprColumnNull(col 37:boolean, col 41:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 37:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 41:int) -> 42:int, IfExprNullColumn(col 43:boolean, null, col 44)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 43:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 44:int) -> 45:int, IfExprLongScalarLongScalar(col 47:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 46:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 46:int) -> 47:boolean) -> 46:date
                     Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false
@@ -552,7 +552,7 @@ STAGE PLANS:
                     includeColumns: [1, 2, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14]
                     dataColumns: l_orderkey:int, l_partkey:int, l_suppkey:int, l_linenumber:int, l_quantity:int, l_extendedprice:double, l_discount:double, l_tax:decimal(10,2)/DECIMAL_64, l_returnflag:char(1), l_linestatus:char(1), l_shipdate:date, l_commitdate:date, l_receiptdate:date, l_shipinstruct:varchar(20), l_shipmode:char(10), l_comment:string
                     partitionColumnCount: 0
-                    scratchColumnTypeNames: [bigint, bigint, bigint, bigint, string, string, string, string, string, bigint, double, double, double, decimal(10,2), decimal(10,2), decimal(12,2), decimal(12,2), decimal(10,2), decimal(10,2), timestamp, timestamp, timestamp, bigint, bigint, bigint, bigint, bigint, bigint, bigint, decimal(10,2), decimal(10,2)]
+                    scratchColumnTypeNames: [bigint, bigint, bigint, bigint, string, string, string, string, string, bigint, double, double, double, decimal(10,2), decimal(10,2), decimal(12,2), decimal(12,2), decimal(10,2)/DECIMAL_64, bigint, decimal(10,2)/DECIMAL_64, bigint, timestamp, timestamp, timestamp, bigint, bigint, bigint, bigint, bigint, bigint, bigint, decimal(10,2), decimal(10,2)]
 
   Stage: Stage-0
     Fetch Operator
@@ -856,8 +856,8 @@ STAGE PLANS:
                     Select Vectorization:
                         className: VectorSelectOperator
                         native: true
-                        projectedOutputColumnNums: [4, 27, 38, 48, 52, 54, 60, 63, 65, 67, 68, 69, 70, 73, 76, 79, 80]
-                        selectExpressions: IfExprColumnCondExpr(col 17:boolean, col 18:stringcol 26:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, ConstantVectorExpression(val Single) -> 18:string, IfExprColumnCondExpr(col 19:boolean, col 20:stringcol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 19:boolean, ConstantVectorExpression(val Two) -> 20:string, IfExprColumnCondExpr(col 21:boolean, col 22:stringcol 24:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 21:boolean, ConstantVectorExpression(val Some) -> 22:string, IfExprStringScalarStringScalar(col 23:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 23:boolean) -> 24:string) -> 25:string) -> 26:string) -> 27:string, IfExprColumnCondExpr(col 23:boolean, col 28:stringcol 37:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 23:boolean, ConstantVectorExpression(val Single) -> 28:string, IfExprColumnCondExpr(col 29:boolean, col 30:stringcol 36:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 29:boolean, ConstantVectorExpression(val Two) -> 30:string, IfExprColumnCondExpr(col 31:boolean, col 32:stringcol 35:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 31:boolean, ConstantVectorExpression(val Some) -> 32:string, IfExprColumnNull(col 33:boolean, col 34:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 33:boolean, ConstantVectorExpression(val Many) -> 34:string) -> 35:string) -> 36:string) -> 37:string) -> 38:string, IfExprColumnCondExpr(col 39:boolean, col 40:stringcol 47:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 39:boolean, ConstantVectorExpression(val Single) -> 40:string, IfExprColumnCondExpr(col 41:boolean, col 42:stringcol 46:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 41:boolean, ConstantVectorExpression(val Two) -> 42:string, IfExprColumnCondExpr(col 43:boolean, col 44:stringcol 45:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 43:boolean, ConstantVectorExpression(val Some) -> 44:string, IfExprNullNull(null, null) -> 45:string) -> 46:string) -> 47:string) -> 48:string, IfExprCondExprCondExpr(col 49:boolean, col 50:datecol 51:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 49:boolean, VectorUDFDateAddColScalar(col 10:date, val 10) -> 50:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 51:date) -> 52:date, IfExprDoubleColumnLongScalar(col 57:boolean, col 58:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 54:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 54:double) -> 58:double) -> 54:double, IfExprCondExprColumn(col 57:boolean, col 59:double, col 58:double)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 58:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 58:double) -> 59:double, ConstantVectorExpression(val 0.0) -> 58:double) -> 60:double, IfExprNullColumn(col 62:boolean, null, col 82)(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 62:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 82:decimal(10,2)) -> 63:decimal(10,2), IfExprColumnNull(col 64:boolean, col 83:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 64:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 83:decimal(10,2)) -> 65:decimal(10,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 67:decimal(12,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 68:decimal(12,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 69:decimal(10,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 70:decimal(10,2), IfExprCondExprCondExpr(col 66:boolean, col 71:timestampcol 72:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 66:boolean, CastDateToTimestamp(col 12:date) -> 71:timestamp, CastDateToTimestamp(col 11:date) -> 72:timestamp) -> 73:timestamp, IfExprCondExprNull(col 74:boolean, col 75:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 74:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 75:int) -> 76:int, IfExprNullCondExpr(col 77:boolean, null, col 78:int)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 77:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 78:int) -> 79:int, IfExprLongScalarLongScalar(col 81:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 80:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 80:int) -> 81:boolean) -> 80:date
+                        projectedOutputColumnNums: [4, 27, 38, 48, 52, 54, 60, 63, 65, 67, 68, 69, 71, 75, 78, 81, 82]
+                        selectExpressions: IfExprColumnCondExpr(col 17:boolean, col 18:stringcol 26:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 17:boolean, ConstantVectorExpression(val Single) -> 18:string, IfExprColumnCondExpr(col 19:boolean, col 20:stringcol 25:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 19:boolean, ConstantVectorExpression(val Two) -> 20:string, IfExprColumnCondExpr(col 21:boolean, col 22:stringcol 24:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 21:boolean, ConstantVectorExpression(val Some) -> 22:string, IfExprStringScalarStringScalar(col 23:boolean, val Many, val Huge number)(children: LongColLessLongScalar(col 4:int, val 100) -> 23:boolean) -> 24:string) -> 25:string) -> 26:string) -> 27:string, IfExprColumnCondExpr(col 23:boolean, col 28:stringcol 37:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 23:boolean, ConstantVectorExpression(val Single) -> 28:string, IfExprColumnCondExpr(col 29:boolean, col 30:stringcol 36:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 29:boolean, ConstantVectorExpression(val Two) -> 30:string, IfExprColumnCondExpr(col 31:boolean, col 32:stringcol 35:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 31:boolean, ConstantVectorExpression(val Some) -> 32:string, IfExprColumnNull(col 33:boolean, col 34:string, null)(children: LongColLessLongScalar(col 4:int, val 100) -> 33:boolean, ConstantVectorExpression(val Many) -> 34:string) -> 35:string) -> 36:string) -> 37:string) -> 38:string, IfExprColumnCondExpr(col 39:boolean, col 40:stringcol 47:string)(children: LongColEqualLongScalar(col 4:int, val 1) -> 39:boolean, ConstantVectorExpression(val Single) -> 40:string, IfExprColumnCondExpr(col 41:boolean, col 42:stringcol 46:string)(children: LongColEqualLongScalar(col 4:int, val 2) -> 41:boolean, ConstantVectorExpression(val Two) -> 42:string, IfExprColumnCondExpr(col 43:boolean, col 44:stringcol 45:string)(children: LongColLessLongScalar(col 4:int, val 10) -> 43:boolean, ConstantVectorExpression(val Some) -> 44:string, IfExprNullNull(null, null) -> 45:string) -> 46:string) -> 47:string) -> 48:string, IfExprCondExprCondExpr(col 49:boolean, col 50:datecol 51:date)(children: StringGroupColEqualCharScalar(col 14:char(10), val SHIP) -> 49:boolean, VectorUDFDateAddColScalar(col 10:date, val 10) -> 50:date, VectorUDFDateAddColScalar(col 10:date, val 5) -> 51:date) -> 52:date, IfExprDoubleColumnLongScalar(col 57:boolean, col 58:double, val 0)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 54:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 54:double) -> 58:double) -> 54:double, IfExprCondExprColumn(col 57:boolean, col 59:double, col 58:double)(children: StringGroupColEqualCharScalar(col 8:char(1), val N) -> 57:boolean, DoubleColMultiplyDoubleColumn(col 5:double, col 58:double)(children: DoubleScalarSubtractDoubleColumn(val 1.0, col 6:double) -> 58:double) -> 59:double, ConstantVectorExpression(val 0.0) -> 58:double) -> 60:double, IfExprNullColumn(col 62:boolean, null, col 84)(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 62:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 84:decimal(10,2)) -> 63:decimal(10,2), IfExprColumnNull(col 64:boolean, col 85:decimal(10,2), null)(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 64:boolean, ConvertDecimal64ToDecimal(col 7:decimal(10,2)/DECIMAL_64) -> 85:decimal(10,2)) -> 65:decimal(10,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'DELIVER IN PERSON'), 0, l_tax))(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 67:decimal(12,2), VectorUDFAdaptor(if((CAST( l_shipinstruct AS STRING) = 'TAKE BACK RETURN'), l_tax, 0))(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 68:decimal(12,2), IfExprDecimal64ScalarDecimal64Column(col 66:boolean, decimal64Val 0, decimalVal 0, col 7:decimal(1,0)/DECIMAL_64)(children: StringGroupColEqualStringScalar(col 61:string, val DELIVER IN PERSON)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 66:boolean) -> 69:decimal(10,2)/DECIMAL_64, IfExprDecimal64ColumnDecimal64Scalar(col 70:boolean, col 7:decimal(10,2)/DECIMAL_64, decimal64Val 0, decimalVal 0)(children: StringGroupColEqualStringScalar(col 61:string, val TAKE BACK RETURN)(children: CastStringGroupToString(col 13:varchar(20)) -> 61:string) -> 70:boolean) -> 71:decimal(10,2)/DECIMAL_64, IfExprCondExprCondExpr(col 72:boolean, col 73:timestampcol 74:timestamp)(children: LongColGreaterLongScalar(col 1:int, val 30) -> 72:boolean, CastDateToTimestamp(col 12:date) -> 73:timestamp, CastDateToTimestamp(col 11:date) -> 74:timestamp) -> 75:timestamp, IfExprCondExprNull(col 76:boolean, col 77:int, null)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 76:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 77:int) -> 78:int, IfExprNullCondExpr(col 79:boolean, null, col 80:int)(children: LongColGreaterLongScalar(col 2:int, val 10000) -> 79:boolean, VectorUDFDateDiffColCol(col 12:date, col 11:date) -> 80:int) -> 81:int, IfExprLongScalarLongScalar(col 83:boolean, val 14245, val 14609)(children: LongColGreaterLongScalar(col 82:int, val 100)(children: LongColModuloLongScalar(col 2:int, val 500) -> 82:int) -> 83:boolean) -> 82:date
                     Statistics: Num rows: 101 Data size: 57327 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false
@@ -885,7 +885,7 @@ STAGE PLANS:
                     includeColumns: [1, 2, 4, 5, 6, 7, 8, 10, 11, 12, 13, 14]
                     dataColumns: l_orderkey:int, l_partkey:int, l_suppkey:int, l_linenumber:int, l_quantity:int, l_extendedprice:double, l_discount:double, l_tax:decimal(10,2)/DECIMAL_64, l_returnflag:char(1), l_linestatus:char(1), l_shipdate:date, l_commitdate:date, l_receiptdate:date, l_shipinstruct:varchar(20), l_shipmode:char(10), l_comment:string
                     partitionColumnCount: 0
-                    scratchColumnTypeNames: [bigint, string, bigint, string, bigint, string, bigint, string, string, string, string, string, bigint, string, bigint, string, bigint, string, string, string, string, string, bigint, string, bigint, string, bigint, string, string, string, string, string, bigint, bigint, bigint, bigint, bigint, double, double, bigint, bigint, double, double, double, string, bigint, decimal(10,2), bigint, decimal(10,2), bigint, decimal(12,2), decimal(12,2), decimal(10,2), decimal(10,2), timestamp, timestamp, timestamp, bigint, bigint, bigint, bigint, bigint, bigint, bigint, bigint, decimal(10,2), decimal(10,2)]
+                    scratchColumnTypeNames: [bigint, string, bigint, string, bigint, string, bigint, string, string, string, string, string, bigint, string, bigint, string, bigint, string, string, string, string, string, bigint, string, bigint, string, bigint, string, string, string, string, string, bigint, bigint, bigint, bigint, bigint, double, double, bigint, bigint, double, double, double, string, bigint, decimal(10,2), bigint, decimal(10,2), bigint, decimal(12,2), decimal(12,2), decimal(10,2)/DECIMAL_64, bigint, decimal(10,2)/DECIMAL_64, bigint, timestamp, timestamp, timestamp, bigint, bigint, bigint, bigint, bigint, bigint, bigint, bigint, decimal(10,2), decimal(10,2)]
 
   Stage: Stage-0
     Fetch Operator
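
In the selectExpressions hunks above, the if((l_shipinstruct = ...), 0, l_tax) projections that previously fell back to the row-mode VectorUDFAdaptor are now compiled to the new IfExprDecimal64ScalarDecimal64Column and IfExprDecimal64ColumnDecimal64Scalar expressions, whose outputs stay in decimal(10,2)/DECIMAL_64 form, as the updated scratchColumnTypeNames lists also show. A minimal sketch of the evaluation idea, using hypothetical stand-in code rather than Hive's real expression classes: the constant branch is pre-scaled to the column's scale once (the plan's "decimal64Val 0"), after which each row is a primitive long select:

// Standalone sketch (hypothetical types, not Hive's API) of an IF over
// decimal64 values: the THEN branch is a long-backed decimal column, the
// ELSE branch a constant already scaled to the column's scale, so the whole
// expression evaluates on primitive longs with no per-row object creation.
public class IfExprDecimal64Sketch {

  /** For each row: out[i] = cond[i] ? thenCol[i] : elseScaledScalar. */
  static void ifExprColumnScalar(boolean[] cond, long[] thenCol,
      long elseScaledScalar, long[] out, int size) {
    for (int i = 0; i < size; i++) {
      out[i] = cond[i] ? thenCol[i] : elseScaledScalar;
    }
  }

  public static void main(String[] args) {
    boolean[] cond = {true, false, true};       // e.g. l_shipinstruct = '...'
    long[] tax = {8, 125, 30};                  // decimal(10,2): 0.08, 1.25, 0.30
    long[] out = new long[3];
    ifExprColumnScalar(cond, tax, 0L, out, 3);  // ELSE constant 0 at scale 2
    System.out.println(java.util.Arrays.toString(out)); // prints: [8, 0, 30]
  }
}

Keeping both branches in decimal64 form is what lets the result column itself remain decimal(10,2)/DECIMAL_64 instead of forcing a conversion to a full decimal scratch column.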