You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by kg...@apache.org on 2019/03/27 09:23:34 UTC

[hive] 02/02: HIVE-15406: Consider vectorizing the new trunc function (Laszlo Bodor via Zoltan Haindrich)

This is an automated email from the ASF dual-hosted git repository.

kgyrtkirk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git

commit a7a9f516b3803830999f60d2bffb45e76c8a91ef
Author: Laszlo Bodor <bo...@gmail.com>
AuthorDate: Wed Mar 27 09:24:54 2019 +0100

    HIVE-15406: Consider vectorizing the new trunc function (Laszlo Bodor via Zoltan Haindrich)
    
    Signed-off-by: Zoltan Haindrich <ki...@rxd.hu>
---
 .../exec/vector/expressions/TruncDateFromDate.java |   39 +
 .../vector/expressions/TruncDateFromString.java    |   51 +
 .../vector/expressions/TruncDateFromTimestamp.java |  164 +++
 .../ql/exec/vector/expressions/TruncDecimal.java   |   54 +
 .../vector/expressions/TruncDecimalNoScale.java    |   29 +
 .../ql/exec/vector/expressions/TruncFloat.java     |  149 +++
 .../exec/vector/expressions/TruncFloatNoScale.java |   29 +
 .../hive/ql/udf/generic/GenericUDFTrunc.java       |   13 +-
 .../test/queries/clientpositive/vector_udf_trunc.q |  110 ++
 .../results/clientpositive/vector_udf_trunc.q.out  | 1343 ++++++++++++++++++++
 10 files changed, 1979 insertions(+), 2 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromDate.java
new file mode 100644
index 0000000..e560de2
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromDate.java
@@ -0,0 +1,39 @@
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.ArgumentType;
+
+/**
+ * Vectorized implementation of trunc(date, fmt) function date timestamp input
+ */
+public class TruncDateFromDate extends TruncDateFromTimestamp {
+  private transient Date date = new Date();
+
+  public TruncDateFromDate(int colNum, byte[] fmt, int outputColumnNum) {
+    super(colNum, fmt, outputColumnNum);
+  }
+
+  private static final long serialVersionUID = 1L;
+
+  public TruncDateFromDate() {
+    super();
+  }
+
+  protected void truncDate(ColumnVector inV, BytesColumnVector outV, int i) {
+    truncDate((LongColumnVector) inV, outV, i);
+  }
+
+  protected void truncDate(LongColumnVector inV, BytesColumnVector outV, int i) {
+    date = Date.ofEpochMilli(inV.vector[i]);
+    processDate(outV, i, date);
+  }
+
+  @Override
+  protected ArgumentType getInputColumnType() {
+    return VectorExpressionDescriptor.ArgumentType.DATE;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromString.java
new file mode 100644
index 0000000..0a0c65b
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromString.java
@@ -0,0 +1,51 @@
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.nio.charset.StandardCharsets;
+
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.ArgumentType;
+
+/**
+ * Vectorized implementation of trunc(date, fmt) function for string input
+ */
+public class TruncDateFromString extends TruncDateFromTimestamp {
+  private transient Date date = new Date();
+
+  public TruncDateFromString(int colNum, byte[] fmt, int outputColumnNum) {
+    super(colNum, fmt, outputColumnNum);
+  }
+
+  private static final long serialVersionUID = 1L;
+
+  public TruncDateFromString() {
+    super();
+  }
+
+  protected void truncDate(ColumnVector inV, BytesColumnVector outV, int i) {
+    truncDate((BytesColumnVector) inV, outV, i);
+  }
+
+  protected void truncDate(BytesColumnVector inV, BytesColumnVector outV, int i) {
+    if (inV.vector[i] == null) {
+      outV.isNull[i] = true;
+      outV.noNulls = false;
+    }
+
+    String dateString =
+        new String(inV.vector[i], inV.start[i], inV.length[i], StandardCharsets.UTF_8);
+    if (dateParser.parseDate(dateString, date)) {
+      processDate(outV, i, date);
+    } else {
+      outV.isNull[i] = true;
+      outV.noNulls = false;
+    }
+  }
+
+  @Override
+  protected ArgumentType getInputColumnType() {
+    return VectorExpressionDescriptor.ArgumentType.STRING_FAMILY;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromTimestamp.java
new file mode 100644
index 0000000..1a1f146
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromTimestamp.java
@@ -0,0 +1,164 @@
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.ArgumentType;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.common.util.DateParser;
+
/**
 * Vectorized implementation of trunc(date, fmt) function for timestamp input.
 *
 * <p>Serves as the base class for the date- and string-input variants, which
 * override {@link #truncDate(ColumnVector, BytesColumnVector, int)} and
 * {@link #getInputColumnType()}.
 */
public class TruncDateFromTimestamp extends VectorExpression {
  private static final long serialVersionUID = 1L;
  // Index of the input column in the batch.
  protected int colNum;
  // Truncation unit: 'MONTH'/'MON'/'MM', 'QUARTER'/'Q' or 'YEAR'/'YYYY'/'YY'.
  protected String fmt;
  protected transient final DateParser dateParser = new DateParser();

  public TruncDateFromTimestamp() {
    super();
    colNum = -1;
  }

  public TruncDateFromTimestamp(int colNum, byte[] fmt, int outputColumnNum) {
    super(outputColumnNum);
    this.colNum = colNum;
    // The scalar format argument arrives as UTF-8 bytes.
    this.fmt = new String(fmt, StandardCharsets.UTF_8);
  }

  @Override
  public String vectorExpressionParameters() {
    return "col " + colNum + ", format " + fmt;
  }

  @Override
  public void evaluate(VectorizedRowBatch batch) throws HiveException {

    if (childExpressions != null) {
      this.evaluateChildren(batch);
    }

    ColumnVector inputColVector = batch.cols[colNum];
    BytesColumnVector outputColVector = (BytesColumnVector) batch.cols[outputColumnNum];

    int[] sel = batch.selected;
    boolean[] inputIsNull = inputColVector.isNull;
    boolean[] outputIsNull = outputColVector.isNull;
    int n = batch.size;

    // return immediately if batch is empty
    if (n == 0) {
      return;
    }

    // We do not need to do a column reset since we are carefully changing the output.
    outputColVector.isRepeating = false;

    // Repeating input: compute row 0 once and mark the output repeating too.
    if (inputColVector.isRepeating) {
      if (inputColVector.noNulls || !inputIsNull[0]) {
        outputIsNull[0] = false;
        truncDate(inputColVector, outputColVector, 0);
      } else {
        outputIsNull[0] = true;
        outputColVector.noNulls = false;
      }
      outputColVector.isRepeating = true;
      return;
    }

    if (inputColVector.noNulls) {
      if (batch.selectedInUse) {

        // CONSIDER: For large n, fill n or all of isNull array and use the tighter ELSE loop.
        if (!outputColVector.noNulls) {
          for (int j = 0; j != n; j++) {
            final int i = sel[j];
            // Set isNull before call in case it changes it mind.
            outputIsNull[i] = false;
            truncDate(inputColVector, outputColVector, i);
          }
        } else {
          for (int j = 0; j != n; j++) {
            final int i = sel[j];
            truncDate(inputColVector, outputColVector, i);
          }
        }
      } else {
        if (!outputColVector.noNulls) {

          // Assume it is almost always a performance win to fill all of isNull so we can
          // safely reset noNulls.
          Arrays.fill(outputIsNull, false);
          outputColVector.noNulls = true;
        }
        for (int i = 0; i != n; i++) {
          truncDate(inputColVector, outputColVector, i);
        }
      }
    } else /* there are nulls in the inputColVector */ {

      // Carefully handle NULLs...
      outputColVector.noNulls = false;

      if (batch.selectedInUse) {
        for (int j = 0; j != n; j++) {
          int i = sel[j];
          // Propagate the input's null flag row by row.
          outputIsNull[i] = inputIsNull[i];
          truncDate(inputColVector, outputColVector, i);
        }
      } else {
        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
        for (int i = 0; i != n; i++) {
          if (!inputColVector.isNull[i]) {
            truncDate(inputColVector, outputColVector, i);
          }
        }
      }
    }
  }

  // Converts row i of the input vector to a Date and writes the truncated
  // result. Subclasses override this for non-timestamp input types.
  protected void truncDate(ColumnVector inV, BytesColumnVector outV, int i) {
    Date date = Date.ofEpochMilli(((TimestampColumnVector) inV).getTime(i));
    processDate(outV, i, date);
  }

  // Truncates the given date in place according to fmt and writes its
  // 'yyyy-MM-dd' string form into row i of the output vector. An unknown
  // fmt leaves the date unchanged (truncate to nearest day).
  protected void processDate(BytesColumnVector outV, int i, Date date) {
    if ("MONTH".equals(fmt) || "MON".equals(fmt) || "MM".equals(fmt)) {
      date.setDayOfMonth(1);
    } else if ("QUARTER".equals(fmt) || "Q".equals(fmt)) {
      // Months are 1-based; map to 0-based to find the quarter, then back to
      // the first month of that quarter (1, 4, 7 or 10).
      int month = date.getMonth() - 1;
      int quarter = month / 3;
      int monthToSet = quarter * 3 + 1;
      date.setMonth(monthToSet);
      date.setDayOfMonth(1);
    } else if ("YEAR".equals(fmt) || "YYYY".equals(fmt) || "YY".equals(fmt)) {
      date.setMonth(1);
      date.setDayOfMonth(1);
    }
    byte[] bytes = date.toString().getBytes(StandardCharsets.UTF_8);
    outV.setVal(i, bytes, 0, bytes.length);
  }

  @Override
  public Descriptor getDescriptor() {
    // trunc(<input type> column, <format> scalar string)
    VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder();
    b.setMode(VectorExpressionDescriptor.Mode.PROJECTION).setNumArguments(2)
        .setArgumentTypes(getInputColumnType(),
            VectorExpressionDescriptor.ArgumentType.STRING_FAMILY)
        .setInputExpressionTypes(VectorExpressionDescriptor.InputExpressionType.COLUMN,
            VectorExpressionDescriptor.InputExpressionType.SCALAR);
    return b.build();
  }

  protected ArgumentType getInputColumnType() {
    return VectorExpressionDescriptor.ArgumentType.TIMESTAMP;
  }
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDecimal.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDecimal.java
new file mode 100644
index 0000000..7b25cc6
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDecimal.java
@@ -0,0 +1,54 @@
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.ArgumentType;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+
+/**
+ * Vectorized implementation of trunc(number, scale) function for decimal input
+ */
+public class TruncDecimal extends TruncFloat {
+  /**
+   * 
+   */
+  private static final long serialVersionUID = 1L;
+
+  public TruncDecimal() {
+    super();
+  }
+
+  public TruncDecimal(int colNum, int scale, int outputColumnNum) {
+    super(colNum, scale, outputColumnNum);
+  }
+
+  @Override
+  protected void trunc(ColumnVector inputColVector, ColumnVector outputColVector, int i) {
+    HiveDecimal input = ((DecimalColumnVector) inputColVector).vector[i].getHiveDecimal();
+
+    HiveDecimal output = trunc(input);
+    ((DecimalColumnVector) outputColVector).vector[i] = new HiveDecimalWritable(output);
+  }
+
+  protected HiveDecimal trunc(HiveDecimal input) {
+    HiveDecimal pow = HiveDecimal.create(Math.pow(10, Math.abs(scale)));
+
+    if (scale >= 0) {
+      if (scale != 0) {
+        long longValue = input.multiply(pow).longValue();
+        return HiveDecimal.create(longValue).divide(pow);
+      } else {
+        return HiveDecimal.create(input.longValue());
+      }
+    } else {
+      long longValue2 = input.divide(pow).longValue();
+      return HiveDecimal.create(longValue2).multiply(pow);
+    }
+  }
+
+  protected ArgumentType getInputColumnType() {
+    return VectorExpressionDescriptor.ArgumentType.DECIMAL;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDecimalNoScale.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDecimalNoScale.java
new file mode 100644
index 0000000..c427db8
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDecimalNoScale.java
@@ -0,0 +1,29 @@
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
+
+/**
+ * Vectorized implementation of trunc(number) function for decimal input
+ */
+public class TruncDecimalNoScale extends TruncDecimal {
+  private static final long serialVersionUID = 1L;
+
+  public TruncDecimalNoScale() {
+    super();
+    colNum = -1;
+  }
+
+  public TruncDecimalNoScale(int colNum, int outputColumnNum) {
+    super(colNum, 0, outputColumnNum);
+  }
+
+  @Override
+  public Descriptor getDescriptor() {
+    VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder();
+    b.setMode(VectorExpressionDescriptor.Mode.PROJECTION).setNumArguments(1)
+        .setArgumentTypes(getInputColumnType())
+        .setInputExpressionTypes(VectorExpressionDescriptor.InputExpressionType.COLUMN);
+    return b.build();
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncFloat.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncFloat.java
new file mode 100644
index 0000000..93f175b
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncFloat.java
@@ -0,0 +1,149 @@
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.math.BigDecimal;
+import java.util.Arrays;
+
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.ArgumentType;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Vectorized implementation of trunc(number, scale) function for float/double input
+ */
+public class TruncFloat extends VectorExpression {
+  private static final long serialVersionUID = 1L;
+  protected int colNum;
+  protected int scale;
+
+  public TruncFloat() {
+    super();
+    colNum = -1;
+  }
+
+  public TruncFloat(int colNum, int scale, int outputColumnNum) {
+    super(outputColumnNum);
+    this.colNum = colNum;
+    this.scale = scale;
+  }
+
+  @Override
+  public String vectorExpressionParameters() {
+    return "col " + colNum + ", scale " + scale;
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) throws HiveException {
+    if (childExpressions != null) {
+      this.evaluateChildren(batch);
+    }
+
+    ColumnVector inputColVector = batch.cols[colNum];
+    ColumnVector outputColVector = batch.cols[outputColumnNum];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    int n = batch.size;
+
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating = false;
+
+    if (inputColVector.isRepeating) {
+      if (inputColVector.noNulls || !inputIsNull[0]) {
+        outputIsNull[0] = false;
+        trunc(inputColVector, outputColVector, 0);
+      } else {
+        outputIsNull[0] = true;
+        outputColVector.noNulls = false;
+      }
+      outputColVector.isRepeating = true;
+      return;
+    }
+
+    if (inputColVector.noNulls) {
+      if (batch.selectedInUse) {
+        if (!outputColVector.noNulls) {
+          for (int j = 0; j != n; j++) {
+            final int i = sel[j];
+            outputIsNull[i] = false;
+            trunc(inputColVector, outputColVector, i);
+          }
+        } else {
+          for (int j = 0; j != n; j++) {
+            final int i = sel[j];
+            trunc(inputColVector, outputColVector, i);
+          }
+        }
+      } else {
+        if (!outputColVector.noNulls) {
+          Arrays.fill(outputIsNull, false);
+          outputColVector.noNulls = true;
+        }
+        for (int i = 0; i != n; i++) {
+          trunc(inputColVector, outputColVector, i);
+        }
+      }
+    } else {
+      outputColVector.noNulls = false;
+
+      if (batch.selectedInUse) {
+        for (int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputIsNull[i] = inputIsNull[i];
+          trunc(inputColVector, outputColVector, i);
+        }
+      } else {
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+        for (int i = 0; i != n; i++) {
+          if (!inputColVector.isNull[i]) {
+            trunc(inputColVector, outputColVector, i);
+          }
+        }
+      }
+    }
+  }
+
+  protected void trunc(ColumnVector inputColVector, ColumnVector outputColVector, int i) {
+    BigDecimal input = BigDecimal.valueOf(((DoubleColumnVector) inputColVector).vector[i]);
+
+    double output = trunc(input).doubleValue();
+    ((DoubleColumnVector) outputColVector).vector[i] = output;
+  }
+
+  protected BigDecimal trunc(BigDecimal input) {
+    BigDecimal pow = BigDecimal.valueOf(Math.pow(10, Math.abs(scale)));
+
+    if (scale >= 0) {
+      if (scale != 0) {
+        long longValue = input.multiply(pow).longValue();
+        return BigDecimal.valueOf(longValue).divide(pow);
+      } else {
+        return BigDecimal.valueOf(input.longValue());
+      }
+    } else {
+      long longValue2 = input.divide(pow).longValue();
+      return BigDecimal.valueOf(longValue2).multiply(pow);
+    }
+  }
+
+  @Override
+  public Descriptor getDescriptor() {
+    VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder();
+    b.setMode(VectorExpressionDescriptor.Mode.PROJECTION).setNumArguments(2)
+        .setArgumentTypes(getInputColumnType(), VectorExpressionDescriptor.ArgumentType.INT_FAMILY)
+        .setInputExpressionTypes(VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR);
+    return b.build();
+  }
+
+  protected ArgumentType getInputColumnType() {
+    return VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncFloatNoScale.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncFloatNoScale.java
new file mode 100644
index 0000000..1309428
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncFloatNoScale.java
@@ -0,0 +1,29 @@
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
+
+/**
+ * Vectorized implementation of trunc(number) function for float/double input
+ */
+public class TruncFloatNoScale extends TruncFloat {
+  private static final long serialVersionUID = 1L;
+
+  public TruncFloatNoScale() {
+    super();
+    colNum = -1;
+  }
+
+  public TruncFloatNoScale(int colNum, int outputColumnNum) {
+    super(colNum, 0, outputColumnNum);
+  }
+
+  @Override
+  public Descriptor getDescriptor() {
+    VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder();
+    b.setMode(VectorExpressionDescriptor.Mode.PROJECTION).setNumArguments(1)
+        .setArgumentTypes(getInputColumnType())
+        .setInputExpressionTypes(VectorExpressionDescriptor.InputExpressionType.COLUMN);
+    return b.build();
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
index 7a7d13e..c56d0f2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
@@ -28,6 +28,14 @@ import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.TruncDateFromDate;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.TruncDateFromString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.TruncDateFromTimestamp;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.TruncDecimal;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.TruncDecimalNoScale;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.TruncFloat;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.TruncFloatNoScale;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritableV2;
@@ -62,8 +70,7 @@ import org.apache.hadoop.io.Text;
     + "to the unit specified by the format model fmt. If you omit fmt, then date is truncated to "
     + "the nearest day. It currently only supports 'MONTH'/'MON'/'MM', 'QUARTER'/'Q' and 'YEAR'/'YYYY'/'YY' as format."
     + "If input is a number group returns N truncated to D decimal places. If D is omitted, then N is truncated to 0 places."
-    + "D can be negative to truncate (make zero) D digits left of the decimal point."
-    , extended = "date is a string in the format 'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'."
+    + "D can be negative to truncate (make zero) D digits left of the decimal point.", extended = "date is a string in the format 'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'."
         + " The time part of date is ignored.\n" + "Example:\n "
         + " > SELECT _FUNC_('2009-02-12', 'MM');\n" + "OK\n" + " '2009-02-01'" + "\n"
         + " > SELECT _FUNC_('2017-03-15', 'Q');\n" + "OK\n" + " '2017-01-01'" + "\n"
@@ -72,6 +79,8 @@ import org.apache.hadoop.io.Text;
         + " > SELECT _FUNC_(1234567891.1234567891,-4);\n" + "OK\n" + " 1234560000"
         + " > SELECT _FUNC_(1234567891.1234567891,0);\n" + "OK\n" + " 1234567891" + "\n"
         + " > SELECT _FUNC_(1234567891.1234567891);\n" + "OK\n" + " 1234567891")
+@VectorizedExpressions({ TruncDateFromTimestamp.class, TruncDateFromString.class,
+    TruncDateFromDate.class, TruncFloat.class, TruncFloatNoScale.class, TruncDecimal.class, TruncDecimalNoScale.class})
 public class GenericUDFTrunc extends GenericUDF {
 
   private transient TimestampConverter timestampConverter;
diff --git a/ql/src/test/queries/clientpositive/vector_udf_trunc.q b/ql/src/test/queries/clientpositive/vector_udf_trunc.q
new file mode 100644
index 0000000..51ed109
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/vector_udf_trunc.q
@@ -0,0 +1,110 @@
+--! qt:dataset:alltypesorc
+set hive.fetch.task.conversion=none;
+set hive.vectorized.execution.enabled=true;
+
+DESCRIBE FUNCTION trunc;
+DESCRIBE FUNCTION EXTENDED trunc;
+
+CREATE TABLE trunc_number(c DOUBLE) STORED AS ORC;
+INSERT INTO TABLE trunc_number VALUES (12345.54321);
+INSERT INTO TABLE trunc_number VALUES (12345);
+INSERT INTO TABLE trunc_number VALUES (0.54321);
+INSERT INTO TABLE trunc_number VALUES (NULL);
+
+-- trunc date from timestamp
+explain vectorization detail select trunc(ctimestamp1, 'MM') from alltypesorc;
+
+select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(ctimestamp1, 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(ctimestamp1, 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+
+-- trunc date from string
+explain vectorization detail select trunc(CAST(ctimestamp1 AS STRING), 'MM') from alltypesorc;
+
+select trunc(CAST(ctimestamp1 AS STRING), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(CAST(ctimestamp1 AS STRING), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(CAST(ctimestamp1 AS STRING), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+
+-- trunc date from date
+explain vectorization detail select trunc(CAST(ctimestamp1 AS DATE), 'MM') from alltypesorc;
+
+select trunc(CAST(ctimestamp1 AS DATE), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(CAST(ctimestamp1 AS DATE), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(CAST(ctimestamp1 AS DATE), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+
+select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc WHERE ctimestamp1 IS NULL LIMIT 10;
+
+-- trunc double
+explain vectorization detail
+select c, trunc(c,0) from trunc_number order by c;
+select c, 0, trunc(c,0) from trunc_number order by c;
+select c, -1, trunc(c,-1) from trunc_number order by c;
+select c, 1, trunc(c,1) from trunc_number order by c;
+
+-- trunc float
+explain vectorization detail
+select c, trunc(CAST (c AS FLOAT), 0) from trunc_number order by c;
+select c, 0, trunc(CAST (c AS FLOAT), 0) from trunc_number order by c;
+select c, -1, trunc(CAST (c AS FLOAT), -1) from trunc_number order by c;
+select c, 1, trunc(CAST (c AS FLOAT), 1) from trunc_number order by c;
+
+-- trunc decimal
+explain vectorization detail
+select c, trunc(CAST (c AS DECIMAL(10,5)), 0) from trunc_number order by c;
+select c, 0, trunc(CAST (c AS DECIMAL(10,5)), 0) from trunc_number order by c;
+select c, -1, trunc(CAST (c AS DECIMAL(10,5)), -1) from trunc_number order by c;
+select c, 1, trunc(CAST (c AS DECIMAL(10,5)), 1) from trunc_number order by c;
+
+-- scale not defined -> 0 (float)
+explain vectorization detail
+select c, trunc(c) from trunc_number order by c;
+select c, trunc(c) from trunc_number order by c;
+
+-- scale not defined -> 0 (decimal)
+explain vectorization detail
+select c, trunc(CAST (c AS DECIMAL(10,5))) from trunc_number order by c;
+select c, trunc(CAST (c AS DECIMAL(10,5))) from trunc_number order by c;
+
+
+
+set hive.vectorized.execution.enabled=false;
+
+-- trunc date from timestamp
+select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(ctimestamp1, 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(ctimestamp1, 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+
+-- trunc date from string
+select trunc(CAST(ctimestamp1 AS STRING), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(CAST(ctimestamp1 AS STRING), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(CAST(ctimestamp1 AS STRING), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+
+-- trunc date from date
+select trunc(CAST(ctimestamp1 AS DATE), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(CAST(ctimestamp1 AS DATE), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(CAST(ctimestamp1 AS DATE), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+
+select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc WHERE ctimestamp1 IS NULL LIMIT 10;
+
+-- trunc double
+select c, 0, trunc(c,0) from trunc_number order by c;
+select c, -1, trunc(c,-1) from trunc_number order by c;
+select c, 1, trunc(c,1) from trunc_number order by c;
+
+-- trunc float
+select c, 0, trunc(CAST (c AS FLOAT), 0) from trunc_number order by c;
+select c, -1, trunc(CAST (c AS FLOAT), -1) from trunc_number order by c;
+select c, 1, trunc(CAST (c AS FLOAT), 1) from trunc_number order by c;
+
+-- trunc decimal
+select c, 0, trunc(CAST (c AS DECIMAL(10,5)), 0) from trunc_number order by c;
+select c, -1, trunc(CAST (c AS DECIMAL(10,5)), -1) from trunc_number order by c;
+select c, 1, trunc(CAST (c AS DECIMAL(10,5)), 1) from trunc_number order by c;
+
+-- scale not defined -> 0 (float)
+select c, trunc(c) from trunc_number order by c;
+
+-- scale not defined -> 0 (decimal)
+select c, trunc(CAST (c AS DECIMAL(10,5))) from trunc_number order by c;
+
+drop table trunc_number;
diff --git a/ql/src/test/results/clientpositive/vector_udf_trunc.q.out b/ql/src/test/results/clientpositive/vector_udf_trunc.q.out
new file mode 100644
index 0000000..a6ce91f
--- /dev/null
+++ b/ql/src/test/results/clientpositive/vector_udf_trunc.q.out
@@ -0,0 +1,1343 @@
+PREHOOK: query: DESCRIBE FUNCTION trunc
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION trunc
+POSTHOOK: type: DESCFUNCTION
+trunc(date, fmt) / trunc(N,D) - Returns If input is date returns date with the time portion of the day truncated to the unit specified by the format model fmt. If you omit fmt, then date is truncated to the nearest day. It currently only supports 'MONTH'/'MON'/'MM', 'QUARTER'/'Q' and 'YEAR'/'YYYY'/'YY' as format.If input is a number group returns N truncated to D decimal places. If D is omitted, then N is truncated to 0 places.D can be negative to truncate (make zero) D digits left of th [...]
+PREHOOK: query: DESCRIBE FUNCTION EXTENDED trunc
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION EXTENDED trunc
+POSTHOOK: type: DESCFUNCTION
+trunc(date, fmt) / trunc(N,D) - Returns If input is date returns date with the time portion of the day truncated to the unit specified by the format model fmt. If you omit fmt, then date is truncated to the nearest day. It currently only supports 'MONTH'/'MON'/'MM', 'QUARTER'/'Q' and 'YEAR'/'YYYY'/'YY' as format.If input is a number group returns N truncated to D decimal places. If D is omitted, then N is truncated to 0 places.D can be negative to truncate (make zero) D digits left of th [...]
+date is a string in the format 'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'. The time part of date is ignored.
+Example:
+  > SELECT trunc('2009-02-12', 'MM');
+OK
+ '2009-02-01'
+ > SELECT trunc('2017-03-15', 'Q');
+OK
+ '2017-01-01'
+ > SELECT trunc('2015-10-27', 'YEAR');
+OK
+ '2015-01-01' > SELECT trunc(1234567891.1234567891,4);
+OK
+ 1234567891.1234
+ > SELECT trunc(1234567891.1234567891,-4);
+OK
+ 1234560000 > SELECT trunc(1234567891.1234567891,0);
+OK
+ 1234567891
+ > SELECT trunc(1234567891.1234567891);
+OK
+ 1234567891
+Function class:org.apache.hadoop.hive.ql.udf.generic.GenericUDFTrunc
+Function type:BUILTIN
+PREHOOK: query: CREATE TABLE trunc_number(c DOUBLE) STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@trunc_number
+POSTHOOK: query: CREATE TABLE trunc_number(c DOUBLE) STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@trunc_number
+PREHOOK: query: INSERT INTO TABLE trunc_number VALUES (12345.54321)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@trunc_number
+POSTHOOK: query: INSERT INTO TABLE trunc_number VALUES (12345.54321)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@trunc_number
+POSTHOOK: Lineage: trunc_number.c SCRIPT []
+PREHOOK: query: INSERT INTO TABLE trunc_number VALUES (12345)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@trunc_number
+POSTHOOK: query: INSERT INTO TABLE trunc_number VALUES (12345)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@trunc_number
+POSTHOOK: Lineage: trunc_number.c SCRIPT []
+PREHOOK: query: INSERT INTO TABLE trunc_number VALUES (0.54321)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@trunc_number
+POSTHOOK: query: INSERT INTO TABLE trunc_number VALUES (0.54321)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@trunc_number
+POSTHOOK: Lineage: trunc_number.c SCRIPT []
+PREHOOK: query: INSERT INTO TABLE trunc_number VALUES (NULL)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@trunc_number
+POSTHOOK: query: INSERT INTO TABLE trunc_number VALUES (NULL)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@trunc_number
+POSTHOOK: Lineage: trunc_number.c EXPRESSION []
+PREHOOK: query: explain vectorization detail select trunc(ctimestamp1, 'MM') from alltypesorc
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail select trunc(ctimestamp1, 'MM') from alltypesorc
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: alltypesorc
+            Statistics: Num rows: 12288 Data size: 491520 Basic stats: COMPLETE Column stats: COMPLETE
+            TableScan Vectorization:
+                native: true
+                vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+            Select Operator
+              expressions: trunc(ctimestamp1, 'MM') (type: string)
+              outputColumnNames: _col0
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [13]
+                  selectExpressions: TruncDateFromTimestamp(col 8, format MM) -> 13:string
+              Statistics: Num rows: 12288 Data size: 2260992 Basic stats: COMPLETE Column stats: COMPLETE
+              File Output Operator
+                compressed: false
+                File Sink Vectorization:
+                    className: VectorFileSinkOperator
+                    native: false
+                Statistics: Num rows: 12288 Data size: 2260992 Basic stats: COMPLETE Column stats: COMPLETE
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+          rowBatchContext:
+              dataColumnCount: 12
+              includeColumns: [8]
+              dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean
+              partitionColumnCount: 0
+              scratchColumnTypeNames: [string]
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:43.619
+1969-12-01	1969-12-31 15:59:43.627
+1969-12-01	1969-12-31 15:59:43.628
+1969-12-01	1969-12-31 15:59:43.631
+1969-12-01	1969-12-31 15:59:43.637
+1969-12-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(ctimestamp1, 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(ctimestamp1, 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:43.619
+1969-10-01	1969-12-31 15:59:43.627
+1969-10-01	1969-12-31 15:59:43.628
+1969-10-01	1969-12-31 15:59:43.631
+1969-10-01	1969-12-31 15:59:43.637
+1969-10-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(ctimestamp1, 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(ctimestamp1, 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:43.619
+1969-01-01	1969-12-31 15:59:43.627
+1969-01-01	1969-12-31 15:59:43.628
+1969-01-01	1969-12-31 15:59:43.631
+1969-01-01	1969-12-31 15:59:43.637
+1969-01-01	1969-12-31 15:59:43.64
+PREHOOK: query: explain vectorization detail select trunc(CAST(ctimestamp1 AS STRING), 'MM') from alltypesorc
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail select trunc(CAST(ctimestamp1 AS STRING), 'MM') from alltypesorc
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: alltypesorc
+            Statistics: Num rows: 12288 Data size: 491520 Basic stats: COMPLETE Column stats: COMPLETE
+            TableScan Vectorization:
+                native: true
+                vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+            Select Operator
+              expressions: trunc(CAST( ctimestamp1 AS STRING), 'MM') (type: string)
+              outputColumnNames: _col0
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [14]
+                  selectExpressions: TruncDateFromString(col 13, format MM)(children: CastTimestampToString(col 8:timestamp) -> 13:string) -> 14:string
+              Statistics: Num rows: 12288 Data size: 2260992 Basic stats: COMPLETE Column stats: COMPLETE
+              File Output Operator
+                compressed: false
+                File Sink Vectorization:
+                    className: VectorFileSinkOperator
+                    native: false
+                Statistics: Num rows: 12288 Data size: 2260992 Basic stats: COMPLETE Column stats: COMPLETE
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+          rowBatchContext:
+              dataColumnCount: 12
+              includeColumns: [8]
+              dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean
+              partitionColumnCount: 0
+              scratchColumnTypeNames: [string, string]
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:43.619
+1969-12-01	1969-12-31 15:59:43.627
+1969-12-01	1969-12-31 15:59:43.628
+1969-12-01	1969-12-31 15:59:43.631
+1969-12-01	1969-12-31 15:59:43.637
+1969-12-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:43.619
+1969-10-01	1969-12-31 15:59:43.627
+1969-10-01	1969-12-31 15:59:43.628
+1969-10-01	1969-12-31 15:59:43.631
+1969-10-01	1969-12-31 15:59:43.637
+1969-10-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:43.619
+1969-01-01	1969-12-31 15:59:43.627
+1969-01-01	1969-12-31 15:59:43.628
+1969-01-01	1969-12-31 15:59:43.631
+1969-01-01	1969-12-31 15:59:43.637
+1969-01-01	1969-12-31 15:59:43.64
+PREHOOK: query: explain vectorization detail select trunc(CAST(ctimestamp1 AS DATE), 'MM') from alltypesorc
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail select trunc(CAST(ctimestamp1 AS DATE), 'MM') from alltypesorc
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: alltypesorc
+            Statistics: Num rows: 12288 Data size: 491520 Basic stats: COMPLETE Column stats: COMPLETE
+            TableScan Vectorization:
+                native: true
+                vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+            Select Operator
+              expressions: trunc(CAST( ctimestamp1 AS DATE), 'MM') (type: string)
+              outputColumnNames: _col0
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [14]
+                  selectExpressions: TruncDateFromDate(col 13, format MM)(children: CastTimestampToDate(col 8:timestamp) -> 13:date) -> 14:string
+              Statistics: Num rows: 12288 Data size: 2260992 Basic stats: COMPLETE Column stats: COMPLETE
+              File Output Operator
+                compressed: false
+                File Sink Vectorization:
+                    className: VectorFileSinkOperator
+                    native: false
+                Statistics: Num rows: 12288 Data size: 2260992 Basic stats: COMPLETE Column stats: COMPLETE
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+          rowBatchContext:
+              dataColumnCount: 12
+              includeColumns: [8]
+              dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean
+              partitionColumnCount: 0
+              scratchColumnTypeNames: [bigint, string]
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:43.619
+1969-12-01	1969-12-31 15:59:43.627
+1969-12-01	1969-12-31 15:59:43.628
+1969-12-01	1969-12-31 15:59:43.631
+1969-12-01	1969-12-31 15:59:43.637
+1969-12-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:43.619
+1969-10-01	1969-12-31 15:59:43.627
+1969-10-01	1969-12-31 15:59:43.628
+1969-10-01	1969-12-31 15:59:43.631
+1969-10-01	1969-12-31 15:59:43.637
+1969-10-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:43.619
+1969-01-01	1969-12-31 15:59:43.627
+1969-01-01	1969-12-31 15:59:43.628
+1969-01-01	1969-12-31 15:59:43.631
+1969-01-01	1969-12-31 15:59:43.637
+1969-01-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc WHERE ctimestamp1 IS NULL LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc WHERE ctimestamp1 IS NULL LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+PREHOOK: query: explain vectorization detail
+select c, trunc(c,0) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail
+select c, trunc(c,0) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: trunc_number
+            Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE
+            TableScan Vectorization:
+                native: true
+                vectorizationSchemaColumns: [0:c:double, 1:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+            Select Operator
+              expressions: c (type: double), trunc(c, 0) (type: double)
+              outputColumnNames: _col0, _col1
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [0, 2]
+                  selectExpressions: TruncFloat(col 0, scale 0) -> 2:double
+              Statistics: Num rows: 4 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: double)
+                sort order: +
+                Reduce Sink Vectorization:
+                    className: VectorReduceSinkOperator
+                    native: false
+                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                    nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+                Statistics: Num rows: 4 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+                value expressions: _col1 (type: double)
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+          rowBatchContext:
+              dataColumnCount: 1
+              includeColumns: [0]
+              dataColumns: c:double
+              partitionColumnCount: 0
+              scratchColumnTypeNames: [double]
+      Reduce Vectorization:
+          enabled: false
+          enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true
+          enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: double)
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 4 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 4 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select c, 0, trunc(c,0) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 0, trunc(c,0) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0	0.0
+12345.0	0	12345.0
+12345.54321	0	12345.0
+NULL	0	NULL
+PREHOOK: query: select c, -1, trunc(c,-1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, -1, trunc(c,-1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	-1	0.0
+12345.0	-1	12340.0
+12345.54321	-1	12340.0
+NULL	-1	NULL
+PREHOOK: query: select c, 1, trunc(c,1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 1, trunc(c,1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	1	0.5
+12345.0	1	12345.0
+12345.54321	1	12345.5
+NULL	1	NULL
+PREHOOK: query: explain vectorization detail
+select c, trunc(CAST (c AS FLOAT), 0) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail
+select c, trunc(CAST (c AS FLOAT), 0) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: trunc_number
+            Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE
+            TableScan Vectorization:
+                native: true
+                vectorizationSchemaColumns: [0:c:double, 1:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+            Select Operator
+              expressions: c (type: double), trunc(UDFToFloat(c), 0) (type: float)
+              outputColumnNames: _col0, _col1
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [0, 2]
+                  selectExpressions: TruncFloat(col 0, scale 0)(children: col 0:double) -> 2:float
+              Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: double)
+                sort order: +
+                Reduce Sink Vectorization:
+                    className: VectorReduceSinkOperator
+                    native: false
+                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                    nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+                Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+                value expressions: _col1 (type: float)
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+          rowBatchContext:
+              dataColumnCount: 1
+              includeColumns: [0]
+              dataColumns: c:double
+              partitionColumnCount: 0
+              scratchColumnTypeNames: [double]
+      Reduce Vectorization:
+          enabled: false
+          enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true
+          enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: float)
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select c, 0, trunc(CAST (c AS FLOAT), 0) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 0, trunc(CAST (c AS FLOAT), 0) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0	0.0
+12345.0	0	12345.0
+12345.54321	0	12345.0
+NULL	0	NULL
+PREHOOK: query: select c, -1, trunc(CAST (c AS FLOAT), -1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, -1, trunc(CAST (c AS FLOAT), -1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	-1	0.0
+12345.0	-1	12340.0
+12345.54321	-1	12340.0
+NULL	-1	NULL
+PREHOOK: query: select c, 1, trunc(CAST (c AS FLOAT), 1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 1, trunc(CAST (c AS FLOAT), 1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	1	0.5
+12345.0	1	12345.0
+12345.54321	1	12345.5
+NULL	1	NULL
+PREHOOK: query: explain vectorization detail
+select c, trunc(CAST (c AS DECIMAL(10,5)), 0) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail
+select c, trunc(CAST (c AS DECIMAL(10,5)), 0) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: trunc_number
+            Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE
+            TableScan Vectorization:
+                native: true
+                vectorizationSchemaColumns: [0:c:double, 1:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+            Select Operator
+              expressions: c (type: double), trunc(CAST( c AS decimal(10,5)), 0) (type: decimal(38,18))
+              outputColumnNames: _col0, _col1
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [0, 3]
+                  selectExpressions: TruncDecimal(col 2, scale 0)(children: CastDoubleToDecimal(col 0:double) -> 2:decimal(10,5)) -> 3:decimal(38,18)
+              Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: double)
+                sort order: +
+                Reduce Sink Vectorization:
+                    className: VectorReduceSinkOperator
+                    native: false
+                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                    nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+                Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: COMPLETE
+                value expressions: _col1 (type: decimal(38,18))
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+          rowBatchContext:
+              dataColumnCount: 1
+              includeColumns: [0]
+              dataColumns: c:double
+              partitionColumnCount: 0
+              scratchColumnTypeNames: [decimal(10,5), decimal(38,18)]
+      Reduce Vectorization:
+          enabled: false
+          enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true
+          enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: decimal(38,18))
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select c, 0, trunc(CAST (c AS DECIMAL(10,5)), 0) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 0, trunc(CAST (c AS DECIMAL(10,5)), 0) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0	0.000000000000000000
+12345.0	0	12345.000000000000000000
+12345.54321	0	12345.000000000000000000
+NULL	0	NULL
+PREHOOK: query: select c, -1, trunc(CAST (c AS DECIMAL(10,5)), -1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, -1, trunc(CAST (c AS DECIMAL(10,5)), -1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	-1	0.000000000000000000
+12345.0	-1	12340.000000000000000000
+12345.54321	-1	12340.000000000000000000
+NULL	-1	NULL
+PREHOOK: query: select c, 1, trunc(CAST (c AS DECIMAL(10,5)), 1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 1, trunc(CAST (c AS DECIMAL(10,5)), 1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	1	0.500000000000000000
+12345.0	1	12345.000000000000000000
+12345.54321	1	12345.500000000000000000
+NULL	1	NULL
+PREHOOK: query: explain vectorization detail
+select c, trunc(c) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail
+select c, trunc(c) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: trunc_number
+            Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE
+            TableScan Vectorization:
+                native: true
+                vectorizationSchemaColumns: [0:c:double, 1:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+            Select Operator
+              expressions: c (type: double), trunc(c) (type: double)
+              outputColumnNames: _col0, _col1
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [0, 2]
+                  selectExpressions: TruncFloatNoScale(col 0, scale 0) -> 2:double
+              Statistics: Num rows: 4 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: double)
+                sort order: +
+                Reduce Sink Vectorization:
+                    className: VectorReduceSinkOperator
+                    native: false
+                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                    nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+                Statistics: Num rows: 4 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+                value expressions: _col1 (type: double)
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+          rowBatchContext:
+              dataColumnCount: 1
+              includeColumns: [0]
+              dataColumns: c:double
+              partitionColumnCount: 0
+              scratchColumnTypeNames: [double]
+      Reduce Vectorization:
+          enabled: false
+          enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true
+          enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: double)
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 4 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 4 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select c, trunc(c) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, trunc(c) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0.0
+12345.0	12345.0
+12345.54321	12345.0
+NULL	NULL
+PREHOOK: query: explain vectorization detail
+select c, trunc(CAST (c AS DECIMAL(10,5))) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail
+select c, trunc(CAST (c AS DECIMAL(10,5))) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: trunc_number
+            Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE
+            TableScan Vectorization:
+                native: true
+                vectorizationSchemaColumns: [0:c:double, 1:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+            Select Operator
+              expressions: c (type: double), trunc(CAST( c AS decimal(10,5))) (type: decimal(38,18))
+              outputColumnNames: _col0, _col1
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [0, 3]
+                  selectExpressions: TruncDecimalNoScale(col 2, scale 0)(children: CastDoubleToDecimal(col 0:double) -> 2:decimal(10,5)) -> 3:decimal(38,18)
+              Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: double)
+                sort order: +
+                Reduce Sink Vectorization:
+                    className: VectorReduceSinkOperator
+                    native: false
+                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                    nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+                Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: COMPLETE
+                value expressions: _col1 (type: decimal(38,18))
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+          rowBatchContext:
+              dataColumnCount: 1
+              includeColumns: [0]
+              dataColumns: c:double
+              partitionColumnCount: 0
+              scratchColumnTypeNames: [decimal(10,5), decimal(38,18)]
+      Reduce Vectorization:
+          enabled: false
+          enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true
+          enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: decimal(38,18))
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select c, trunc(CAST (c AS DECIMAL(10,5))) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, trunc(CAST (c AS DECIMAL(10,5))) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0.000000000000000000
+12345.0	12345.000000000000000000
+12345.54321	12345.000000000000000000
+NULL	NULL
+PREHOOK: query: select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:43.619
+1969-12-01	1969-12-31 15:59:43.627
+1969-12-01	1969-12-31 15:59:43.628
+1969-12-01	1969-12-31 15:59:43.631
+1969-12-01	1969-12-31 15:59:43.637
+1969-12-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(ctimestamp1, 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(ctimestamp1, 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:43.619
+1969-10-01	1969-12-31 15:59:43.627
+1969-10-01	1969-12-31 15:59:43.628
+1969-10-01	1969-12-31 15:59:43.631
+1969-10-01	1969-12-31 15:59:43.637
+1969-10-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(ctimestamp1, 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(ctimestamp1, 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:43.619
+1969-01-01	1969-12-31 15:59:43.627
+1969-01-01	1969-12-31 15:59:43.628
+1969-01-01	1969-12-31 15:59:43.631
+1969-01-01	1969-12-31 15:59:43.637
+1969-01-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:43.619
+1969-12-01	1969-12-31 15:59:43.627
+1969-12-01	1969-12-31 15:59:43.628
+1969-12-01	1969-12-31 15:59:43.631
+1969-12-01	1969-12-31 15:59:43.637
+1969-12-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:43.619
+1969-10-01	1969-12-31 15:59:43.627
+1969-10-01	1969-12-31 15:59:43.628
+1969-10-01	1969-12-31 15:59:43.631
+1969-10-01	1969-12-31 15:59:43.637
+1969-10-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:43.619
+1969-01-01	1969-12-31 15:59:43.627
+1969-01-01	1969-12-31 15:59:43.628
+1969-01-01	1969-12-31 15:59:43.631
+1969-01-01	1969-12-31 15:59:43.637
+1969-01-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:43.619
+1969-12-01	1969-12-31 15:59:43.627
+1969-12-01	1969-12-31 15:59:43.628
+1969-12-01	1969-12-31 15:59:43.631
+1969-12-01	1969-12-31 15:59:43.637
+1969-12-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:43.619
+1969-10-01	1969-12-31 15:59:43.627
+1969-10-01	1969-12-31 15:59:43.628
+1969-10-01	1969-12-31 15:59:43.631
+1969-10-01	1969-12-31 15:59:43.637
+1969-10-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:43.619
+1969-01-01	1969-12-31 15:59:43.627
+1969-01-01	1969-12-31 15:59:43.628
+1969-01-01	1969-12-31 15:59:43.631
+1969-01-01	1969-12-31 15:59:43.637
+1969-01-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc WHERE ctimestamp1 IS NULL LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc WHERE ctimestamp1 IS NULL LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+PREHOOK: query: select c, 0, trunc(c,0) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 0, trunc(c,0) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0	0.0
+12345.0	0	12345.0
+12345.54321	0	12345.0
+NULL	0	NULL
+PREHOOK: query: select c, -1, trunc(c,-1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, -1, trunc(c,-1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	-1	0.0
+12345.0	-1	12340.0
+12345.54321	-1	12340.0
+NULL	-1	NULL
+PREHOOK: query: select c, 1, trunc(c,1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 1, trunc(c,1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	1	0.5
+12345.0	1	12345.0
+12345.54321	1	12345.5
+NULL	1	NULL
+PREHOOK: query: select c, 0, trunc(CAST (c AS FLOAT), 0) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 0, trunc(CAST (c AS FLOAT), 0) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0	0.0
+12345.0	0	12345.0
+12345.54321	0	12345.0
+NULL	0	NULL
+PREHOOK: query: select c, -1, trunc(CAST (c AS FLOAT), -1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, -1, trunc(CAST (c AS FLOAT), -1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	-1	0.0
+12345.0	-1	12340.0
+12345.54321	-1	12340.0
+NULL	-1	NULL
+PREHOOK: query: select c, 1, trunc(CAST (c AS FLOAT), 1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 1, trunc(CAST (c AS FLOAT), 1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	1	0.5
+12345.0	1	12345.0
+12345.54321	1	12345.5
+NULL	1	NULL
+PREHOOK: query: select c, 0, trunc(CAST (c AS DECIMAL(10,5)), 0) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 0, trunc(CAST (c AS DECIMAL(10,5)), 0) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0	0.000000000000000000
+12345.0	0	12345.000000000000000000
+12345.54321	0	12345.000000000000000000
+NULL	0	NULL
+PREHOOK: query: select c, -1, trunc(CAST (c AS DECIMAL(10,5)), -1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, -1, trunc(CAST (c AS DECIMAL(10,5)), -1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	-1	0.000000000000000000
+12345.0	-1	12340.000000000000000000
+12345.54321	-1	12340.000000000000000000
+NULL	-1	NULL
+PREHOOK: query: select c, 1, trunc(CAST (c AS DECIMAL(10,5)), 1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 1, trunc(CAST (c AS DECIMAL(10,5)), 1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	1	0.500000000000000000
+12345.0	1	12345.000000000000000000
+12345.54321	1	12345.500000000000000000
+NULL	1	NULL
+PREHOOK: query: select c, trunc(c) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, trunc(c) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0.0
+12345.0	12345.0
+12345.54321	12345.0
+NULL	NULL
+PREHOOK: query: select c, trunc(CAST (c AS DECIMAL(10,5))) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, trunc(CAST (c AS DECIMAL(10,5))) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0.000000000000000000
+12345.0	12345.000000000000000000
+12345.54321	12345.000000000000000000
+NULL	NULL
+PREHOOK: query: drop table trunc_number
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@trunc_number
+PREHOOK: Output: default@trunc_number
+POSTHOOK: query: drop table trunc_number
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@trunc_number
+POSTHOOK: Output: default@trunc_number