Posted to commits@hive.apache.org by kg...@apache.org on 2019/03/27 09:23:32 UTC

[hive] branch master updated (b389484 -> a7a9f51)

This is an automated email from the ASF dual-hosted git repository.

kgyrtkirk pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


    from b389484  HIVE-21455: Too verbose logging in AvroGenericRecordReader (Miklos Szurap, reviewed by David Mollitor and Peter Vary)
     new a7cd99d  HIVE-16255: Support percentile_cont / percentile_disc (Laszlo Bodor via Zoltan Haindrich)
     new a7a9f51  HIVE-15406: Consider vectorizing the new trunc function (Laszlo Bodor via Zoltan Haindrich)

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../hadoop/hive/ql/exec/FunctionRegistry.java      |    2 +
 .../exec/vector/expressions/TruncDateFromDate.java |   39 +
 .../vector/expressions/TruncDateFromString.java    |   51 +
 .../vector/expressions/TruncDateFromTimestamp.java |  164 +++
 .../ql/exec/vector/expressions/TruncDecimal.java   |   54 +
 .../vector/expressions/TruncDecimalNoScale.java    |   29 +
 .../ql/exec/vector/expressions/TruncFloat.java     |  149 +++
 .../exec/vector/expressions/TruncFloatNoScale.java |   29 +
 .../ql/udf/generic/GenericUDAFPercentileCont.java  |  502 ++++++++
 .../ql/udf/generic/GenericUDAFPercentileDisc.java  |  139 ++
 .../hive/ql/udf/generic/GenericUDFTrunc.java       |   13 +-
 .../udf/generic/TestGenericUDAFPercentileCont.java |  181 +++
 .../udf/generic/TestGenericUDAFPercentileDisc.java |  182 +++
 .../queries/clientpositive/udaf_percentile_cont.q  |   69 +
 .../queries/clientpositive/udaf_percentile_disc.q  |   69 +
 .../test/queries/clientpositive/vector_udf_trunc.q |  110 ++
 .../results/clientpositive/show_functions.q.out    |    2 +
 .../clientpositive/udaf_percentile_cont.q.out      |  421 ++++++
 .../clientpositive/udaf_percentile_cont_disc.q.out |  842 ++++++++++++
 .../clientpositive/udaf_percentile_disc.q.out      |  421 ++++++
 .../results/clientpositive/vector_udf_trunc.q.out  | 1343 ++++++++++++++++++++
 21 files changed, 4809 insertions(+), 2 deletions(-)
 create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromDate.java
 create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromString.java
 create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromTimestamp.java
 create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDecimal.java
 create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDecimalNoScale.java
 create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncFloat.java
 create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncFloatNoScale.java
 create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileCont.java
 create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileDisc.java
 create mode 100644 ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDAFPercentileCont.java
 create mode 100644 ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDAFPercentileDisc.java
 create mode 100644 ql/src/test/queries/clientpositive/udaf_percentile_cont.q
 create mode 100644 ql/src/test/queries/clientpositive/udaf_percentile_disc.q
 create mode 100644 ql/src/test/queries/clientpositive/vector_udf_trunc.q
 create mode 100644 ql/src/test/results/clientpositive/udaf_percentile_cont.q.out
 create mode 100644 ql/src/test/results/clientpositive/udaf_percentile_cont_disc.q.out
 create mode 100644 ql/src/test/results/clientpositive/udaf_percentile_disc.q.out
 create mode 100644 ql/src/test/results/clientpositive/vector_udf_trunc.q.out


[hive] 01/02: HIVE-16255: Support percentile_cont / percentile_disc (Laszlo Bodor via Zoltan Haindrich)

Posted by kg...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kgyrtkirk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git

commit a7cd99d8c20ab4b38487fd84afd36d1682310b09
Author: Laszlo Bodor <bo...@gmail.com>
AuthorDate: Wed Mar 27 09:24:39 2019 +0100

    HIVE-16255: Support percentile_cont / percentile_disc (Laszlo Bodor via Zoltan Haindrich)
    
    Signed-off-by: Zoltan Haindrich <ki...@rxd.hu>
---
 .../hadoop/hive/ql/exec/FunctionRegistry.java      |   2 +
 .../ql/udf/generic/GenericUDAFPercentileCont.java  | 502 ++++++++++++
 .../ql/udf/generic/GenericUDAFPercentileDisc.java  | 139 ++++
 .../udf/generic/TestGenericUDAFPercentileCont.java | 181 +++++
 .../udf/generic/TestGenericUDAFPercentileDisc.java | 182 +++++
 .../queries/clientpositive/udaf_percentile_cont.q  |  69 ++
 .../queries/clientpositive/udaf_percentile_disc.q  |  69 ++
 .../results/clientpositive/show_functions.q.out    |   2 +
 .../clientpositive/udaf_percentile_cont.q.out      | 421 +++++++++++
 .../clientpositive/udaf_percentile_cont_disc.q.out | 842 +++++++++++++++++++++
 .../clientpositive/udaf_percentile_disc.q.out      | 421 +++++++++++
 11 files changed, 2830 insertions(+)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index ed41bef..253570b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -473,6 +473,8 @@ public final class FunctionRegistry {
     system.registerGenericUDAF("bloom_filter", new GenericUDAFBloomFilter());
     system.registerGenericUDAF("approx_distinct", new GenericUDAFApproximateDistinct());
     system.registerUDAF("percentile", UDAFPercentile.class);
+    system.registerGenericUDAF("percentile_cont", new GenericUDAFPercentileCont());
+    system.registerGenericUDAF("percentile_disc", new GenericUDAFPercentileDisc());
 
 
     // Generic UDFs
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileCont.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileCont.java
new file mode 100644
index 0000000..72a19bd
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileCont.java
@@ -0,0 +1,502 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.io.LongWritable;
+
+/**
+ * GenericUDAFPercentileCont.
+ */
+@Description(name = "percentile_cont", value = "_FUNC_(input, pc) "
+    + "- Returns the percentile of expr at pc (range: [0,1]).")
+public class GenericUDAFPercentileCont extends AbstractGenericUDAFResolver {
+
+  private static final Comparator<LongWritable> LONG_COMPARATOR;
+  private static final Comparator<DoubleWritable> DOUBLE_COMPARATOR;
+
+  static {
+    LONG_COMPARATOR = ShimLoader.getHadoopShims().getLongComparator();
+    DOUBLE_COMPARATOR = new Comparator<DoubleWritable>() {
+      @Override
+      public int compare(DoubleWritable o1, DoubleWritable o2) {
+        return o1.compareTo(o2);
+      }
+    };
+  }
+
+  @Override
+  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+    if (parameters.length != 2) {
+      throw new UDFArgumentTypeException(parameters.length - 1, "Exactly 2 arguments are expected.");
+    }
+
+    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+      throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
+          + parameters[0].getTypeName() + " is passed.");
+    }
+    switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
+    case BYTE:
+    case SHORT:
+    case INT:
+    case LONG:
+    case VOID:
+      return new PercentileContLongEvaluator();
+    case FLOAT:
+    case DOUBLE:
+    case DECIMAL:
+      return new PercentileContDoubleEvaluator();
+    case STRING:
+    case TIMESTAMP:
+    case VARCHAR:
+    case CHAR:
+    case BOOLEAN:
+    case DATE:
+    default:
+      throw new UDFArgumentTypeException(0,
+          "Only numeric arguments are accepted but " + parameters[0].getTypeName() + " is passed.");
+    }
+  }
+
+  /**
+   * A comparator to sort the entries in order - Long.
+   */
+  @SuppressWarnings("serial")
+  public static class LongComparator
+      implements Comparator<Map.Entry<LongWritable, LongWritable>>, Serializable {
+    @Override
+    public int compare(Map.Entry<LongWritable, LongWritable> o1,
+        Map.Entry<LongWritable, LongWritable> o2) {
+      return LONG_COMPARATOR.compare(o1.getKey(), o2.getKey());
+    }
+  }
+
+  /**
+   * A comparator to sort the entries in order - Double.
+   */
+  @SuppressWarnings("serial")
+  public static class DoubleComparator
+      implements Comparator<Map.Entry<DoubleWritable, LongWritable>>, Serializable {
+    @Override
+    public int compare(Map.Entry<DoubleWritable, LongWritable> o1,
+        Map.Entry<DoubleWritable, LongWritable> o2) {
+      return DOUBLE_COMPARATOR.compare(o1.getKey(), o2.getKey());
+    }
+  }
+
+  protected interface PercentileCalculator<U> {
+    double getPercentile(List<Map.Entry<U, LongWritable>> entriesList, double position);
+  }
+
+  /**
+   * An abstract class to hold the generic udf functions for calculating percentile.
+   */
+  public abstract static class PercentileContEvaluator<T, U> extends GenericUDAFEvaluator {
+    PercentileCalculator<U> calc = getCalculator();
+
+    /**
+     * A state class to store intermediate aggregation results.
+     */
+    public class PercentileAgg extends AbstractAggregationBuffer {
+      Map<U, LongWritable> counts;
+      List<DoubleWritable> percentiles;
+    }
+
+    // For PARTIAL1 and COMPLETE
+    protected PrimitiveObjectInspector inputOI;
+    MapObjectInspector countsOI;
+    ListObjectInspector percentilesOI;
+
+    // For PARTIAL1 and PARTIAL2
+    protected transient Object[] partialResult;
+
+    // FINAL and COMPLETE output
+    protected DoubleWritable result;
+
+    // PARTIAL2 and FINAL inputs
+    protected transient StructObjectInspector soi;
+    protected transient StructField countsField;
+    protected transient StructField percentilesField;
+
+    public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+      super.init(m, parameters);
+
+      initInspectors(parameters);
+
+      if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {// ...for partial result
+        partialResult = new Object[2];
+
+        ArrayList<ObjectInspector> foi = getPartialInspectors();
+
+        ArrayList<String> fname = new ArrayList<String>();
+        fname.add("counts");
+        fname.add("percentiles");
+
+        return ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
+      } else { // ...for final result
+        result = new DoubleWritable(0);
+        return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+      }
+    }
+
+    protected abstract PercentileCalculator<U> getCalculator();
+
+    protected abstract ArrayList<ObjectInspector> getPartialInspectors();
+
+    protected abstract T getInput(Object object, PrimitiveObjectInspector inputOI);
+
+    protected abstract U wrapInput(T input);
+
+    protected abstract U copyInput(U input);
+
+    protected abstract void sortEntries(List<Entry<U, LongWritable>> entriesList);
+
+    protected void initInspectors(ObjectInspector[] parameters) {
+      if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {// ...for real input data
+        inputOI = (PrimitiveObjectInspector) parameters[0];
+      } else { // ...for partial result as input
+        soi = (StructObjectInspector) parameters[0];
+
+        countsField = soi.getStructFieldRef("counts");
+        percentilesField = soi.getStructFieldRef("percentiles");
+
+        countsOI = (MapObjectInspector) countsField.getFieldObjectInspector();
+        percentilesOI = (ListObjectInspector) percentilesField.getFieldObjectInspector();
+      }
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      PercentileAgg agg = new PercentileAgg();
+      return agg;
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      PercentileAgg percAgg = (PercentileAgg) agg;
+      if (percAgg.counts != null) {
+        percAgg.counts.clear();
+      }
+    }
+
+    @Override
+    public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+      PercentileAgg percAgg = (PercentileAgg) agg;
+      Double percentile = ((HiveDecimalWritable) parameters[1]).getHiveDecimal().doubleValue();
+
+      if (percAgg.percentiles == null) {
+        validatePercentile(percentile);
+        percAgg.percentiles = new ArrayList<DoubleWritable>(1);
+        percAgg.percentiles.add(new DoubleWritable(percentile));
+      }
+
+      if (parameters[0] == null) {
+        return;
+      }
+
+      T input = getInput(parameters[0], inputOI);
+
+      if (input != null) {
+        increment(percAgg, wrapInput(input), 1);
+      }
+    }
+
+    protected void increment(PercentileAgg s, U input, long i) {
+      if (s.counts == null) {
+        s.counts = new HashMap<U, LongWritable>();
+      }
+      LongWritable count = s.counts.get(input);
+      if (count == null) {
+        s.counts.put(copyInput(input), new LongWritable(i));
+      } else {
+        count.set(count.get() + i);
+      }
+    }
+
+    @Override
+    public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+      if (partial == null) {
+        return;
+      }
+
+      Object objCounts = soi.getStructFieldData(partial, countsField);
+      Object objPercentiles = soi.getStructFieldData(partial, percentilesField);
+
+      Map<U, LongWritable> counts = (Map<U, LongWritable>) countsOI.getMap(objCounts);
+      List<DoubleWritable> percentiles =
+          (List<DoubleWritable>) percentilesOI.getList(objPercentiles);
+
+      if (counts == null || percentiles == null) {
+        return;
+      }
+
+      PercentileAgg percAgg = (PercentileAgg) agg;
+
+      if (percAgg.percentiles == null) {
+        percAgg.percentiles = new ArrayList<DoubleWritable>(percentiles);
+      }
+
+      for (Map.Entry<U, LongWritable> e : counts.entrySet()) {
+        increment(percAgg, e.getKey(), e.getValue().get());
+      }
+    }
+
+    @Override
+    public Object terminate(AggregationBuffer agg) throws HiveException {
+      PercentileAgg percAgg = (PercentileAgg) agg;
+
+      // No input data.
+      if (percAgg.counts == null || percAgg.counts.size() == 0) {
+        return null;
+      }
+
+      // Get all items into an array and sort them.
+      Set<Map.Entry<U, LongWritable>> entries = percAgg.counts.entrySet();
+      List<Map.Entry<U, LongWritable>> entriesList =
+          new ArrayList<Map.Entry<U, LongWritable>>(entries);
+      sortEntries(entriesList);
+
+      // Accumulate the counts.
+      long total = getTotal(entriesList);
+
+      // Initialize the result.
+      if (result == null) {
+        result = new DoubleWritable();
+      }
+
+      calculatePercentile(percAgg, entriesList, total);
+
+      return result;
+    }
+
+    @Override
+    public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+      PercentileAgg percAgg = (PercentileAgg) agg;
+      partialResult[0] = percAgg.counts;
+      partialResult[1] = percAgg.percentiles;
+
+      return partialResult;
+    }
+
+    protected long getTotal(List<Map.Entry<U, LongWritable>> entriesList) {
+      long total = 0;
+      for (int i = 0; i < entriesList.size(); i++) {
+        LongWritable count = entriesList.get(i).getValue();
+        total += count.get();
+        count.set(total);
+      }
+      return total;
+    }
+
+    protected void validatePercentile(Double percentile) {
+      if (percentile < 0.0 || percentile > 1.0) {
+        throw new RuntimeException("Percentile value must be within the range of 0 to 1.");
+      }
+    }
+
+    protected void calculatePercentile(PercentileAgg percAgg,
+        List<Map.Entry<U, LongWritable>> entriesList, long total) {
+      // maxPosition is the 1.0 percentile
+      long maxPosition = total - 1;
+      double position = maxPosition * percAgg.percentiles.get(0).get();
+      result.set(calc.getPercentile(entriesList, position));
+    }
+
+  }
+
+  /**
+   * The evaluator for percentile computation based on long.
+   */
+  public static class PercentileContLongEvaluator
+      extends PercentileContEvaluator<Long, LongWritable> {
+
+    protected ArrayList<ObjectInspector> getPartialInspectors() {
+      ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+
+      foi.add(ObjectInspectorFactory.getStandardMapObjectInspector(
+          PrimitiveObjectInspectorFactory.writableLongObjectInspector,
+          PrimitiveObjectInspectorFactory.writableLongObjectInspector));
+      foi.add(ObjectInspectorFactory.getStandardListObjectInspector(
+          PrimitiveObjectInspectorFactory.writableDoubleObjectInspector));
+      return foi;
+    }
+
+    protected Long getInput(Object parameter, PrimitiveObjectInspector inputOI) {
+      return PrimitiveObjectInspectorUtils.getLong(parameter, inputOI);
+    }
+
+    protected LongWritable wrapInput(Long input) {
+      return new LongWritable(input);
+    }
+
+    protected LongWritable copyInput(LongWritable input) {
+      return new LongWritable(input.get());
+    }
+
+    protected void sortEntries(List<Entry<LongWritable, LongWritable>> entriesList) {
+      Collections.sort(entriesList, new LongComparator());
+    }
+
+    @Override
+    protected PercentileCalculator<LongWritable> getCalculator() {
+      return new PercentileContLongCalculator();
+    }
+  }
+
+  /**
+   * The evaluator for percentile computation based on double.
+   */
+  public static class PercentileContDoubleEvaluator
+      extends PercentileContEvaluator<Double, DoubleWritable> {
+    @Override
+    protected ArrayList<ObjectInspector> getPartialInspectors() {
+      ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+
+      foi.add(ObjectInspectorFactory.getStandardMapObjectInspector(
+          PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
+          PrimitiveObjectInspectorFactory.writableLongObjectInspector));
+      foi.add(ObjectInspectorFactory.getStandardListObjectInspector(
+          PrimitiveObjectInspectorFactory.writableDoubleObjectInspector));
+      return foi;
+    }
+
+    @Override
+    protected Double getInput(Object parameter, PrimitiveObjectInspector inputOI) {
+      return PrimitiveObjectInspectorUtils.getDouble(parameter, inputOI);
+    }
+
+    @Override
+    protected DoubleWritable wrapInput(Double input) {
+      return new DoubleWritable(input);
+    }
+
+    protected DoubleWritable copyInput(DoubleWritable input) {
+      return new DoubleWritable(input.get());
+    }
+
+    protected void sortEntries(List<Entry<DoubleWritable, LongWritable>> entriesList) {
+      Collections.sort(entriesList, new DoubleComparator());
+    }
+
+    @Override
+    protected PercentileCalculator<DoubleWritable> getCalculator() {
+      return new PercentileContDoubleCalculator();
+    }
+  }
+
+  /**
+   * Continuous percentile calculators.
+   */
+  public static class PercentileContLongCalculator implements PercentileCalculator<LongWritable> {
+    /**
+     * Get the percentile value.
+     */
+    public double getPercentile(List<Entry<LongWritable, LongWritable>> entriesList,
+        double position) {
+      // We may need to do linear interpolation to get the exact percentile
+      long lower = (long) Math.floor(position);
+      long higher = (long) Math.ceil(position);
+
+      // Linear search since this won't take much time from the total execution anyway
+      // lower has the range of [0 .. total-1]
+      // The first entry with accumulated count (lower+1) corresponds to the lower position.
+      int i = 0;
+      while (entriesList.get(i).getValue().get() < lower + 1) {
+        i++;
+      }
+
+      long lowerKey = entriesList.get(i).getKey().get();
+      if (higher == lower) {
+        // no interpolation needed because position does not have a fraction
+        return lowerKey;
+      }
+
+      if (entriesList.get(i).getValue().get() < higher + 1) {
+        i++;
+      }
+      long higherKey = entriesList.get(i).getKey().get();
+
+      if (higherKey == lowerKey) {
+        // no interpolation needed because lower position and higher position have the same key
+        return lowerKey;
+      }
+
+      // Linear interpolation to get the exact percentile
+      return (higher - position) * lowerKey + (position - lower) * higherKey;
+    }
+  }
+
+  public static class PercentileContDoubleCalculator
+      implements PercentileCalculator<DoubleWritable> {
+
+    public double getPercentile(List<Map.Entry<DoubleWritable, LongWritable>> entriesList,
+        double position) {
+      long lower = (long) Math.floor(position);
+      long higher = (long) Math.ceil(position);
+
+      int i = 0;
+      while (entriesList.get(i).getValue().get() < lower + 1) {
+        i++;
+      }
+
+      double lowerKey = entriesList.get(i).getKey().get();
+      if (higher == lower) {
+        return lowerKey;
+      }
+
+      if (entriesList.get(i).getValue().get() < higher + 1) {
+        i++;
+      }
+      double higherKey = entriesList.get(i).getKey().get();
+
+      if (higherKey == lowerKey) {
+        return lowerKey;
+      }
+
+      return (higher - position) * lowerKey + (position - lower) * higherKey;
+    }
+  }
+}
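
The calculators above implement the usual continuous-percentile rule: the
requested percentile p is mapped to position (n - 1) * p over the sorted
values, and when that position falls between two entries the result is a
linear interpolation between them. The evaluator itself aggregates
(value, count) pairs and walks accumulated counts, but the arithmetic is the
same. A minimal plain-Java sketch of that arithmetic (standalone, not using
Hive's Writable/ObjectInspector machinery; class and method names here are
illustrative only):

    import java.util.Arrays;

    public class PercentileContSketch {
      // Continuous percentile over a small in-memory data set.
      static double percentileCont(double[] values, double p) {
        double[] sorted = values.clone();
        Arrays.sort(sorted);
        double position = (sorted.length - 1) * p;   // position of p over indexes 0..n-1
        int lower = (int) Math.floor(position);
        int higher = (int) Math.ceil(position);
        if (lower == higher) {
          return sorted[lower];                      // no fraction, no interpolation needed
        }
        // Linear interpolation between the two neighbouring values
        return (higher - position) * sorted[lower] + (position - lower) * sorted[higher];
      }

      public static void main(String[] args) {
        double[] items = {1, 2, 3, 4, 5};
        System.out.println(percentileCont(items, 0.50)); // 3.0
        System.out.println(percentileCont(items, 0.49)); // 2.96
        System.out.println(percentileCont(items, 0.51)); // 3.04
      }
    }

These are the same values the new TestGenericUDAFPercentileCont expects for
0.50, 0.49 and 0.51 on {1, 2, 3, 4, 5}.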
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileDisc.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileDisc.java
new file mode 100644
index 0000000..3ac336e
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileDisc.java
@@ -0,0 +1,139 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.io.LongWritable;
+
+/**
+ * GenericUDAFPercentileDisc.
+ */
+@Description(name = "percentile_disc", value = "_FUNC_(input, pc) - "
+    + "Returns the percentile of expr at pc (range: [0,1]) without interpolation.")
+public class GenericUDAFPercentileDisc extends GenericUDAFPercentileCont {
+
+  @Override
+  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+    if (parameters.length != 2) {
+      throw new UDFArgumentTypeException(parameters.length - 1, "Exactly 2 arguments are expected.");
+    }
+
+    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+      throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
+          + parameters[0].getTypeName() + " is passed.");
+    }
+    switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
+    case BYTE:
+    case SHORT:
+    case INT:
+    case LONG:
+    case VOID:
+      return new PercentileDiscLongEvaluator();
+    case FLOAT:
+    case DOUBLE:
+    case DECIMAL:
+      return new PercentileDiscDoubleEvaluator();
+    case STRING:
+    case TIMESTAMP:
+    case VARCHAR:
+    case CHAR:
+    case BOOLEAN:
+    case DATE:
+    default:
+      throw new UDFArgumentTypeException(0,
+          "Only numeric arguments are accepted but " + parameters[0].getTypeName() + " is passed.");
+    }
+  }
+
+  /**
+   * The evaluator for discrete percentile computation based on long.
+   */
+  public static class PercentileDiscLongEvaluator extends PercentileContLongEvaluator {
+    @Override
+    protected PercentileCalculator<LongWritable> getCalculator() {
+      return new PercentileDiscLongCalculator();
+    }
+  }
+
+  /**
+   * The evaluator for discrete percentile computation based on double.
+   */
+  public static class PercentileDiscDoubleEvaluator extends PercentileContDoubleEvaluator {
+    @Override
+    protected PercentileCalculator<DoubleWritable> getCalculator() {
+      return new PercentileDiscDoubleCalculator();
+    }
+  }
+
+  public static class PercentileDiscLongCalculator implements PercentileCalculator<LongWritable> {
+    public double getPercentile(List<Map.Entry<LongWritable, LongWritable>> entriesList,
+        double position) {
+      long lower = (long) Math.floor(position);
+      long higher = (long) Math.ceil(position);
+
+      int i = 0;
+      while (entriesList.get(i).getValue().get() < lower + 1) {
+        i++;
+      }
+
+      long lowerKey = entriesList.get(i).getKey().get();
+      if (higher == lower) {
+        return lowerKey;
+      }
+
+      if (entriesList.get(i).getValue().get() < higher + 1) {
+        i++;
+      }
+      return entriesList.get(i).getKey().get();
+    }
+  }
+
+  public static class PercentileDiscDoubleCalculator
+      implements PercentileCalculator<DoubleWritable> {
+    public double getPercentile(List<Map.Entry<DoubleWritable, LongWritable>> entriesList,
+        double position) {
+      long lower = (long) Math.floor(position);
+      long higher = (long) Math.ceil(position);
+
+      int i = 0;
+      while (entriesList.get(i).getValue().get() < lower + 1) {
+        i++;
+      }
+
+      double lowerKey = entriesList.get(i).getKey().get();
+      if (higher == lower) {
+        return lowerKey;
+      }
+
+      if (entriesList.get(i).getValue().get() < higher + 1) {
+        i++;
+      }
+      return entriesList.get(i).getKey().get();
+    }
+  }
+}
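
percentile_disc shares the evaluators with percentile_cont and differs only in
the calculator: it never interpolates, so the result is always an actual value
from the input, effectively the value at the ceiling of the computed position.
A minimal plain-Java sketch under the same simplifying assumptions as the
percentile_cont sketch above (sorted raw values instead of the (value, count)
pairs the real evaluator aggregates):

    import java.util.Arrays;

    public class PercentileDiscSketch {
      // Discrete percentile: always returns a value present in the data set.
      static double percentileDisc(double[] values, double p) {
        double[] sorted = values.clone();
        Arrays.sort(sorted);
        double position = (sorted.length - 1) * p;
        int lower = (int) Math.floor(position);
        int higher = (int) Math.ceil(position);
        // No interpolation: when the position falls between two values, take the higher one.
        return (lower == higher) ? sorted[lower] : sorted[higher];
      }

      public static void main(String[] args) {
        double[] items = {1, 2, 3, 4, 5};
        System.out.println(percentileDisc(items, 0.49)); // 3.0
        System.out.println(percentileDisc(items, 0.51)); // 4.0
        double[] pg = {54, 35, 15, 15, 76, 87, 78};
        System.out.println(percentileDisc(pg, 0.72));    // 78.0, matching the Postgres reference test
      }
    }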
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDAFPercentileCont.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDAFPercentileCont.java
new file mode 100644
index 0000000..1a7aaca
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDAFPercentileCont.java
@@ -0,0 +1,181 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.util.ArrayList;
+
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFPercentileCont.PercentileContDoubleEvaluator;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFPercentileCont.PercentileContEvaluator.PercentileAgg;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFPercentileCont.PercentileContLongCalculator;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFPercentileCont.PercentileContLongEvaluator;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Test class for GenericUDAFPercentileCont.
+ */
+public class TestGenericUDAFPercentileCont {
+  PercentileContLongCalculator calc = new PercentileContLongCalculator();
+
+  // Long type tests
+  @Test
+  public void testNoInterpolation() throws Exception {
+    Long[] items = new Long[] {1L, 2L, 3L, 4L, 5L };
+    checkPercentile(items, 0.5, 3);
+  }
+
+  @Test
+  public void testInterpolateLower() throws Exception {
+    Long[] items = new Long[] {1L, 2L, 3L, 4L, 5L };
+    checkPercentile(items, 0.49, 2.96);
+  }
+
+  @Test
+  public void testInterpolateHigher() throws Exception {
+    Long[] items = new Long[] {1L, 2L, 3L, 4L, 5L };
+    checkPercentile(items, 0.51, 3.04);
+  }
+
+  @Test
+  public void testSingleItem50() throws Exception {
+    Long[] items = new Long[] {1L };
+    checkPercentile(items, 0.5, 1);
+  }
+
+  @Test
+  public void testSingleItem100() throws Exception {
+    Long[] items = new Long[] {1L };
+    checkPercentile(items, 1, 1);
+  }
+
+  /*
+   * POSTGRES check: WITH vals (k) AS (VALUES (54), (35), (15), (15), (76), (87), (78)) SELECT *
+   * INTO table percentile_src FROM vals; select percentile_cont(.50) within group (order by k) as
+   * perc from percentile_src;
+   */
+  @Test
+  public void testPostgresRefExample() throws Exception {
+    Long[] items = new Long[] {54L, 35L, 15L, 15L, 76L, 87L, 78L };
+    checkPercentile(items, 0.5, 54);
+  }
+
+  /*
+   * POSTGRES check: WITH vals (k) AS (VALUES (54), (35), (15), (15), (76), (87), (78)) SELECT *
+   * INTO table percentile_src FROM vals; select percentile_cont(.72) within group (order by k) as
+   * perc from percentile_src;
+   */
+  @Test
+  public void testPostgresRefExample2() throws Exception {
+    Long[] items = new Long[] {54L, 35L, 15L, 15L, 76L, 87L, 78L };
+    checkPercentile(items, 0.72, 76.64);
+  }
+
+  // Double type tests
+  @Test
+  public void testDoubleNoInterpolation() throws Exception {
+    Double[] items = new Double[] {1.0, 2.0, 3.0, 4.0, 5.0 };
+    checkPercentile(items, 0.5, 3);
+  }
+
+  @Test
+  public void testDoubleInterpolateLower() throws Exception {
+    Double[] items = new Double[] {1.0, 2.0, 3.0, 4.0, 5.0 };
+    checkPercentile(items, 0.49, 2.96);
+  }
+
+  @Test
+  public void testDoubleInterpolateHigher() throws Exception {
+    Double[] items = new Double[] {1.0, 2.0, 3.0, 4.0, 5.0 };
+    checkPercentile(items, 0.51, 3.04);
+  }
+
+  @Test
+  public void testDoubleSingleItem50() throws Exception {
+    Double[] items = new Double[] {1.0 };
+    checkPercentile(items, 0.5, 1);
+  }
+
+  @Test
+  public void testDoubleSingleItem100() throws Exception {
+    Double[] items = new Double[] {1.0 };
+    checkPercentile(items, 1, 1);
+  }
+
+  /*
+   * POSTGRES check: WITH vals (k) AS (VALUES (54.0), (35.0), (15.0), (15.0), (76.0), (87.0),
+   * (78.0)) SELECT * INTO table percentile_src FROM vals; select percentile_cont(.50) within group
+   * (order by k) as perc from percentile_src;
+   */
+  @Test
+  public void testDoublePostgresRefExample() throws Exception {
+    Double[] items = new Double[] {54.0, 35.0, 15.0, 15.0, 76.0, 87.0, 78.0 };
+    checkPercentile(items, 0.5, 54);
+  }
+
+  /*
+   * POSTGRES check: WITH vals (k) AS (VALUES (54.5), (35.3), (15.7), (15.7), (76.8), (87.34),
+   * (78.0)) SELECT * INTO table percentile_src FROM vals; select percentile_cont(.72) within group
+   * (order by k) as perc from percentile_src;
+   */
+  @Test
+  public void testDoublePostgresRefExample2() throws Exception {
+    Double[] items = new Double[] {54.5, 35.3, 15.7, 15.7, 76.8, 87.34, 78.0 };
+    checkPercentile(items, 0.72, 77.184);
+  }
+
+  @SuppressWarnings({ "unchecked", "resource" })
+  private void checkPercentile(Long[] items, double percentile, double expected) throws Exception {
+    PercentileContLongEvaluator eval = new GenericUDAFPercentileCont.PercentileContLongEvaluator();
+
+    PercentileAgg agg = new PercentileContLongEvaluator().new PercentileAgg();
+
+    agg.percentiles = new ArrayList<DoubleWritable>();
+    agg.percentiles.add(new DoubleWritable(percentile));
+
+    for (int i = 0; i < items.length; i++) {
+      eval.increment(agg, new LongWritable(items[i]), 1);
+    }
+
+    DoubleWritable result = (DoubleWritable) eval.terminate(agg);
+
+    Assert.assertEquals(expected, result.get(), 0.01);
+    eval.close();
+  }
+
+  @SuppressWarnings({ "unchecked", "resource" })
+  private void checkPercentile(Double[] items, double percentile, double expected) throws Exception {
+    PercentileContDoubleEvaluator eval = new GenericUDAFPercentileCont.PercentileContDoubleEvaluator();
+
+    PercentileAgg agg = new PercentileContDoubleEvaluator().new PercentileAgg();
+
+    agg.percentiles = new ArrayList<DoubleWritable>();
+    agg.percentiles.add(new DoubleWritable(percentile));
+
+    for (int i = 0; i < items.length; i++) {
+      eval.increment(agg, new DoubleWritable(items[i]), 1);
+    }
+
+    DoubleWritable result = (DoubleWritable) eval.terminate(agg);
+
+    Assert.assertEquals(expected, result.get(), 0.01);
+    eval.close();
+  }
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDAFPercentileDisc.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDAFPercentileDisc.java
new file mode 100644
index 0000000..1a1b3b0
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDAFPercentileDisc.java
@@ -0,0 +1,182 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.util.ArrayList;
+
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFPercentileCont.PercentileContEvaluator.PercentileAgg;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFPercentileCont.PercentileContLongEvaluator;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFPercentileDisc.PercentileDiscDoubleEvaluator;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFPercentileDisc.PercentileDiscLongCalculator;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Test class for GenericUDAFPercentileDisc.
+ */
+public class TestGenericUDAFPercentileDisc {
+  PercentileDiscLongCalculator calc = new PercentileDiscLongCalculator();
+
+  // Long type tests
+  @Test
+  public void testNoInterpolation() throws Exception {
+    Long[] items = new Long[] {1L, 2L, 3L, 4L, 5L };
+    checkPercentile(items, 0.5, 3);
+  }
+
+  @Test
+  public void testInterpolateLower() throws Exception {
+    Long[] items = new Long[] {1L, 2L, 3L, 4L, 5L };
+    checkPercentile(items, 0.49, 3.0);
+  }
+
+  @Test
+  public void testInterpolateHigher() throws Exception {
+    Long[] items = new Long[] {1L, 2L, 3L, 4L, 5L };
+    checkPercentile(items, 0.51, 4.0);
+  }
+
+  @Test
+  public void testSingleItem50() throws Exception {
+    Long[] items = new Long[] {1L };
+    checkPercentile(items, 0.5, 1);
+  }
+
+  @Test
+  public void testSingleItem100() throws Exception {
+    Long[] items = new Long[] {1L };
+    checkPercentile(items, 1, 1);
+  }
+
+  /*
+   * POSTGRES check: WITH vals (k) AS (VALUES (54), (35), (15), (15), (76), (87), (78)) SELECT *
+   * INTO table percentile_src FROM vals; select percentile_disc(.50) within group (order by k) as
+   * perc from percentile_src;
+   */
+  @Test
+  public void testPostgresRefExample() throws Exception {
+    Long[] items = new Long[] {54L, 35L, 15L, 15L, 76L, 87L, 78L };
+    checkPercentile(items, 0.5, 54);
+  }
+
+  /*
+   * POSTGRES check: WITH vals (k) AS (VALUES (54), (35), (15), (15), (76), (87), (78)) SELECT *
+   * INTO table percentile_src FROM vals; select percentile_disc(.72) within group (order by k) as
+   * perc from percentile_src;
+   */
+  @Test
+  public void testPostgresRefExample2() throws Exception {
+    Long[] items = new Long[] {54L, 35L, 15L, 15L, 76L, 87L, 78L };
+    checkPercentile(items, 0.72, 78);
+  }
+
+  // Double type tests
+  @Test
+  public void testDoubleNoInterpolation() throws Exception {
+    Double[] items = new Double[] {1.0, 2.0, 3.0, 4.0, 5.0 };
+    checkPercentile(items, 0.5, 3);
+  }
+
+  @Test
+  public void testDoubleInterpolateLower() throws Exception {
+    Double[] items = new Double[] {1.0, 2.0, 3.0, 4.0, 5.0 };
+    checkPercentile(items, 0.49, 3.0);
+  }
+
+  @Test
+  public void testDoubleInterpolateHigher() throws Exception {
+    Double[] items = new Double[] {1.0, 2.0, 3.0, 4.0, 5.0 };
+    checkPercentile(items, 0.51, 4.0);
+  }
+
+  @Test
+  public void testDoubleSingleItem50() throws Exception {
+    Double[] items = new Double[] {1.0 };
+    checkPercentile(items, 0.5, 1);
+  }
+
+  @Test
+  public void testDoubleSingleItem100() throws Exception {
+    Double[] items = new Double[] {1.0 };
+    checkPercentile(items, 1, 1);
+  }
+
+  /*
+   * POSTGRES check: WITH vals (k) AS (VALUES (54.0), (35.0), (15.0), (15.0), (76.0), (87.0),
+   * (78.0)) SELECT * INTO table percentile_src FROM vals; select percentile_disc(.50) within group
+   * (order by k) as perc from percentile_src;
+   */
+  @Test
+  public void testDoublePostgresRefExample() throws Exception {
+    Double[] items = new Double[] {54.0, 35.0, 15.0, 15.0, 76.0, 87.0, 78.0 };
+    checkPercentile(items, 0.5, 54);
+  }
+
+  /*
+   * POSTGRES check: WITH vals (k) AS (VALUES (54.5), (35.3), (15.7), (15.7), (76.8), (87.34),
+   * (78.0)) SELECT * INTO table percentile_src FROM vals; select percentile_disc(.72) within group
+   * (order by k) as perc from percentile_src;
+   */
+  @Test
+  public void testDoublePostgresRefExample2() throws Exception {
+    Double[] items = new Double[] {54.5, 35.3, 15.7, 15.7, 76.8, 87.34, 78.0 };
+    checkPercentile(items, 0.72, 78.0);
+  }
+
+  private void checkPercentile(Long[] items, double percentile, double expected) throws Exception {
+    PercentileContLongEvaluator eval = new GenericUDAFPercentileDisc.PercentileDiscLongEvaluator();
+
+    PercentileAgg agg = new PercentileContLongEvaluator().new PercentileAgg();
+
+    agg.percentiles = new ArrayList<DoubleWritable>();
+    agg.percentiles.add(new DoubleWritable(percentile));
+
+    for (int i = 0; i < items.length; i++) {
+      eval.increment(agg, new LongWritable(items[i]), 1);
+    }
+
+    DoubleWritable result = (DoubleWritable) eval.terminate(agg);
+
+    Assert.assertEquals(expected, result.get(), 0.01);
+    eval.close();
+  }
+
+  @SuppressWarnings({ "unchecked", "resource" })
+  private void checkPercentile(Double[] items, double percentile, double expected)
+      throws Exception {
+    PercentileDiscDoubleEvaluator eval =
+        new GenericUDAFPercentileDisc.PercentileDiscDoubleEvaluator();
+
+    PercentileAgg agg = new PercentileDiscDoubleEvaluator().new PercentileAgg();
+
+    agg.percentiles = new ArrayList<DoubleWritable>();
+    agg.percentiles.add(new DoubleWritable(percentile));
+
+    for (int i = 0; i < items.length; i++) {
+      eval.increment(agg, new DoubleWritable(items[i]), 1);
+    }
+
+    DoubleWritable result = (DoubleWritable) eval.terminate(agg);
+
+    Assert.assertEquals(expected, result.get(), 0.01);
+    eval.close();
+  }
+}
diff --git a/ql/src/test/queries/clientpositive/udaf_percentile_cont.q b/ql/src/test/queries/clientpositive/udaf_percentile_cont.q
new file mode 100644
index 0000000..6d788c1
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/udaf_percentile_cont.q
@@ -0,0 +1,69 @@
+--! qt:dataset:src
+DESCRIBE FUNCTION percentile_cont;
+DESCRIBE FUNCTION EXTENDED percentile_cont;
+
+
+set hive.map.aggr = false;
+set hive.groupby.skewindata = false;
+
+-- SORT_QUERY_RESULTS
+
+SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10;
+
+
+set hive.map.aggr = true;
+set hive.groupby.skewindata = false;
+
+SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10;
+
+
+
+set hive.map.aggr = false;
+set hive.groupby.skewindata = true;
+
+SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10;
+
+
+set hive.map.aggr = true;
+set hive.groupby.skewindata = true;
+
+SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10;
+
+
+set hive.map.aggr = true;
+set hive.groupby.skewindata = false;
+
+-- test null handling
+SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(NULL, 0.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10;
+
+
+-- test empty array handling
+SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(IF(CAST(key AS INT) DIV 10 < 5, 1, NULL), 0.5)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10;
+
+select percentile_cont(cast(key as bigint), 0.5) from src where false;
diff --git a/ql/src/test/queries/clientpositive/udaf_percentile_disc.q b/ql/src/test/queries/clientpositive/udaf_percentile_disc.q
new file mode 100644
index 0000000..7ba703e
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/udaf_percentile_disc.q
@@ -0,0 +1,69 @@
+--! qt:dataset:src
+DESCRIBE FUNCTION percentile_disc;
+DESCRIBE FUNCTION EXTENDED percentile_disc;
+
+
+set hive.map.aggr = false;
+set hive.groupby.skewindata = false;
+
+-- SORT_QUERY_RESULTS
+
+SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10;
+
+
+set hive.map.aggr = true;
+set hive.groupby.skewindata = false;
+
+SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10;
+
+
+
+set hive.map.aggr = false;
+set hive.groupby.skewindata = true;
+
+SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10;
+
+
+set hive.map.aggr = true;
+set hive.groupby.skewindata = true;
+
+SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10;
+
+
+set hive.map.aggr = true;
+set hive.groupby.skewindata = false;
+
+-- test null handling
+SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(NULL, 0.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10;
+
+
+-- test empty array handling
+SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(IF(CAST(key AS INT) DIV 10 < 5, 1, NULL), 0.5)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10;
+
+select percentile_disc(cast(key as bigint), 0.5) from src where false;
diff --git a/ql/src/test/results/clientpositive/show_functions.q.out b/ql/src/test/results/clientpositive/show_functions.q.out
index 0b772f0..4e44753 100644
--- a/ql/src/test/results/clientpositive/show_functions.q.out
+++ b/ql/src/test/results/clientpositive/show_functions.q.out
@@ -194,6 +194,8 @@ parse_url_tuple
 percent_rank
 percentile
 percentile_approx
+percentile_cont
+percentile_disc
 pi
 pmod
 posexplode
diff --git a/ql/src/test/results/clientpositive/udaf_percentile_cont.q.out b/ql/src/test/results/clientpositive/udaf_percentile_cont.q.out
new file mode 100644
index 0000000..dda6ce5
--- /dev/null
+++ b/ql/src/test/results/clientpositive/udaf_percentile_cont.q.out
@@ -0,0 +1,421 @@
+PREHOOK: query: DESCRIBE FUNCTION percentile_cont
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION percentile_cont
+POSTHOOK: type: DESCFUNCTION
+percentile_cont(input, pc) - Returns the percentile of expr at pc (range: [0,1]).
+PREHOOK: query: DESCRIBE FUNCTION EXTENDED percentile_cont
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION EXTENDED percentile_cont
+POSTHOOK: type: DESCFUNCTION
+percentile_cont(input, pc) - Returns the percentile of expr at pc (range: [0,1]).
+Function class:org.apache.hadoop.hive.ql.udf.generic.GenericUDAFPercentileCont
+Function type:BUILTIN
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0.0	4.5	9.0
+1	10.0	15.0	19.0
+10	100.0	103.0	105.0
+11	111.0	117.0	119.0
+12	120.0	127.0	129.0
+13	131.0	137.0	138.0
+14	143.0	146.0	149.0
+15	150.0	154.0	158.0
+16	160.0	166.5	169.0
+17	170.0	175.0	179.0
+18	180.0	186.5	189.0
+19	190.0	194.5	199.0
+2	20.0	26.0	28.0
+20	200.0	205.0	209.0
+21	213.0	216.5	219.0
+22	221.0	224.0	229.0
+23	230.0	234.0	239.0
+24	241.0	244.0	249.0
+25	252.0	256.0	258.0
+26	260.0	264.0	266.0
+27	272.0	275.0	278.0
+28	280.0	283.5	289.0
+29	291.0	297.0	298.0
+3	30.0	35.0	37.0
+30	302.0	307.0	309.0
+31	310.0	316.0	318.0
+32	321.0	324.0	327.0
+33	331.0	333.0	339.0
+34	341.0	345.0	348.0
+35	351.0	353.0	356.0
+36	360.0	367.0	369.0
+37	373.0	376.0	379.0
+38	382.0	384.0	389.0
+39	392.0	396.0	399.0
+4	41.0	42.5	47.0
+40	400.0	403.5	409.0
+41	411.0	415.5	419.0
+42	421.0	425.5	429.0
+43	430.0	435.0	439.0
+44	443.0	446.0	449.0
+45	452.0	455.0	459.0
+46	460.0	467.5	469.0
+47	470.0	477.0	479.0
+48	480.0	484.0	489.0
+49	490.0	494.5	498.0
+5	51.0	54.0	58.0
+6	64.0	66.5	69.0
+7	70.0	73.0	78.0
+8	80.0	84.0	87.0
+9	90.0	95.0	98.0
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0.0	4.5	9.0
+1	10.0	15.0	19.0
+10	100.0	103.0	105.0
+11	111.0	117.0	119.0
+12	120.0	127.0	129.0
+13	131.0	137.0	138.0
+14	143.0	146.0	149.0
+15	150.0	154.0	158.0
+16	160.0	166.5	169.0
+17	170.0	175.0	179.0
+18	180.0	186.5	189.0
+19	190.0	194.5	199.0
+2	20.0	26.0	28.0
+20	200.0	205.0	209.0
+21	213.0	216.5	219.0
+22	221.0	224.0	229.0
+23	230.0	234.0	239.0
+24	241.0	244.0	249.0
+25	252.0	256.0	258.0
+26	260.0	264.0	266.0
+27	272.0	275.0	278.0
+28	280.0	283.5	289.0
+29	291.0	297.0	298.0
+3	30.0	35.0	37.0
+30	302.0	307.0	309.0
+31	310.0	316.0	318.0
+32	321.0	324.0	327.0
+33	331.0	333.0	339.0
+34	341.0	345.0	348.0
+35	351.0	353.0	356.0
+36	360.0	367.0	369.0
+37	373.0	376.0	379.0
+38	382.0	384.0	389.0
+39	392.0	396.0	399.0
+4	41.0	42.5	47.0
+40	400.0	403.5	409.0
+41	411.0	415.5	419.0
+42	421.0	425.5	429.0
+43	430.0	435.0	439.0
+44	443.0	446.0	449.0
+45	452.0	455.0	459.0
+46	460.0	467.5	469.0
+47	470.0	477.0	479.0
+48	480.0	484.0	489.0
+49	490.0	494.5	498.0
+5	51.0	54.0	58.0
+6	64.0	66.5	69.0
+7	70.0	73.0	78.0
+8	80.0	84.0	87.0
+9	90.0	95.0	98.0
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0.0	4.5	9.0
+1	10.0	15.0	19.0
+10	100.0	103.0	105.0
+11	111.0	117.0	119.0
+12	120.0	127.0	129.0
+13	131.0	137.0	138.0
+14	143.0	146.0	149.0
+15	150.0	154.0	158.0
+16	160.0	166.5	169.0
+17	170.0	175.0	179.0
+18	180.0	186.5	189.0
+19	190.0	194.5	199.0
+2	20.0	26.0	28.0
+20	200.0	205.0	209.0
+21	213.0	216.5	219.0
+22	221.0	224.0	229.0
+23	230.0	234.0	239.0
+24	241.0	244.0	249.0
+25	252.0	256.0	258.0
+26	260.0	264.0	266.0
+27	272.0	275.0	278.0
+28	280.0	283.5	289.0
+29	291.0	297.0	298.0
+3	30.0	35.0	37.0
+30	302.0	307.0	309.0
+31	310.0	316.0	318.0
+32	321.0	324.0	327.0
+33	331.0	333.0	339.0
+34	341.0	345.0	348.0
+35	351.0	353.0	356.0
+36	360.0	367.0	369.0
+37	373.0	376.0	379.0
+38	382.0	384.0	389.0
+39	392.0	396.0	399.0
+4	41.0	42.5	47.0
+40	400.0	403.5	409.0
+41	411.0	415.5	419.0
+42	421.0	425.5	429.0
+43	430.0	435.0	439.0
+44	443.0	446.0	449.0
+45	452.0	455.0	459.0
+46	460.0	467.5	469.0
+47	470.0	477.0	479.0
+48	480.0	484.0	489.0
+49	490.0	494.5	498.0
+5	51.0	54.0	58.0
+6	64.0	66.5	69.0
+7	70.0	73.0	78.0
+8	80.0	84.0	87.0
+9	90.0	95.0	98.0
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0.0	4.5	9.0
+1	10.0	15.0	19.0
+10	100.0	103.0	105.0
+11	111.0	117.0	119.0
+12	120.0	127.0	129.0
+13	131.0	137.0	138.0
+14	143.0	146.0	149.0
+15	150.0	154.0	158.0
+16	160.0	166.5	169.0
+17	170.0	175.0	179.0
+18	180.0	186.5	189.0
+19	190.0	194.5	199.0
+2	20.0	26.0	28.0
+20	200.0	205.0	209.0
+21	213.0	216.5	219.0
+22	221.0	224.0	229.0
+23	230.0	234.0	239.0
+24	241.0	244.0	249.0
+25	252.0	256.0	258.0
+26	260.0	264.0	266.0
+27	272.0	275.0	278.0
+28	280.0	283.5	289.0
+29	291.0	297.0	298.0
+3	30.0	35.0	37.0
+30	302.0	307.0	309.0
+31	310.0	316.0	318.0
+32	321.0	324.0	327.0
+33	331.0	333.0	339.0
+34	341.0	345.0	348.0
+35	351.0	353.0	356.0
+36	360.0	367.0	369.0
+37	373.0	376.0	379.0
+38	382.0	384.0	389.0
+39	392.0	396.0	399.0
+4	41.0	42.5	47.0
+40	400.0	403.5	409.0
+41	411.0	415.5	419.0
+42	421.0	425.5	429.0
+43	430.0	435.0	439.0
+44	443.0	446.0	449.0
+45	452.0	455.0	459.0
+46	460.0	467.5	469.0
+47	470.0	477.0	479.0
+48	480.0	484.0	489.0
+49	490.0	494.5	498.0
+5	51.0	54.0	58.0
+6	64.0	66.5	69.0
+7	70.0	73.0	78.0
+8	80.0	84.0	87.0
+9	90.0	95.0	98.0
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(NULL, 0.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(NULL, 0.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	NULL
+1	NULL
+10	NULL
+11	NULL
+12	NULL
+13	NULL
+14	NULL
+15	NULL
+16	NULL
+17	NULL
+18	NULL
+19	NULL
+2	NULL
+20	NULL
+21	NULL
+22	NULL
+23	NULL
+24	NULL
+25	NULL
+26	NULL
+27	NULL
+28	NULL
+29	NULL
+3	NULL
+30	NULL
+31	NULL
+32	NULL
+33	NULL
+34	NULL
+35	NULL
+36	NULL
+37	NULL
+38	NULL
+39	NULL
+4	NULL
+40	NULL
+41	NULL
+42	NULL
+43	NULL
+44	NULL
+45	NULL
+46	NULL
+47	NULL
+48	NULL
+49	NULL
+5	NULL
+6	NULL
+7	NULL
+8	NULL
+9	NULL
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(IF(CAST(key AS INT) DIV 10 < 5, 1, NULL), 0.5)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(IF(CAST(key AS INT) DIV 10 < 5, 1, NULL), 0.5)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	1.0
+1	1.0
+10	NULL
+11	NULL
+12	NULL
+13	NULL
+14	NULL
+15	NULL
+16	NULL
+17	NULL
+18	NULL
+19	NULL
+2	1.0
+20	NULL
+21	NULL
+22	NULL
+23	NULL
+24	NULL
+25	NULL
+26	NULL
+27	NULL
+28	NULL
+29	NULL
+3	1.0
+30	NULL
+31	NULL
+32	NULL
+33	NULL
+34	NULL
+35	NULL
+36	NULL
+37	NULL
+38	NULL
+39	NULL
+4	1.0
+40	NULL
+41	NULL
+42	NULL
+43	NULL
+44	NULL
+45	NULL
+46	NULL
+47	NULL
+48	NULL
+49	NULL
+5	NULL
+6	NULL
+7	NULL
+8	NULL
+9	NULL
+PREHOOK: query: select percentile_cont(cast(key as bigint), 0.5) from src where false
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select percentile_cont(cast(key as bigint), 0.5) from src where false
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+NULL
diff --git a/ql/src/test/results/clientpositive/udaf_percentile_cont_disc.q.out b/ql/src/test/results/clientpositive/udaf_percentile_cont_disc.q.out
new file mode 100644
index 0000000..7400d0b
--- /dev/null
+++ b/ql/src/test/results/clientpositive/udaf_percentile_cont_disc.q.out
@@ -0,0 +1,842 @@
+PREHOOK: query: DESCRIBE FUNCTION percentile_cont
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION percentile_cont
+POSTHOOK: type: DESCFUNCTION
+percentile_cont(input, pc) - Returns the percentile of expr at pc (range: [0,1]).
+PREHOOK: query: DESCRIBE FUNCTION EXTENDED percentile_cont
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION EXTENDED percentile_cont
+POSTHOOK: type: DESCFUNCTION
+percentile_cont(input, pc) - Returns the percentile of expr at pc (range: [0,1]).
+Function class:org.apache.hadoop.hive.ql.udf.generic.GenericUDAFPercentileCont
+Function type:BUILTIN
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS INT), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS INT), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0.0	4.5	9.0
+1	10.0	15.0	19.0
+10	100.0	103.0	105.0
+11	111.0	117.0	119.0
+12	120.0	127.0	129.0
+13	131.0	137.0	138.0
+14	143.0	146.0	149.0
+15	150.0	154.0	158.0
+16	160.0	166.5	169.0
+17	170.0	175.0	179.0
+18	180.0	186.5	189.0
+19	190.0	194.5	199.0
+2	20.0	26.0	28.0
+20	200.0	205.0	209.0
+21	213.0	216.5	219.0
+22	221.0	224.0	229.0
+23	230.0	234.0	239.0
+24	241.0	244.0	249.0
+25	252.0	256.0	258.0
+26	260.0	264.0	266.0
+27	272.0	275.0	278.0
+28	280.0	283.5	289.0
+29	291.0	297.0	298.0
+3	30.0	35.0	37.0
+30	302.0	307.0	309.0
+31	310.0	316.0	318.0
+32	321.0	324.0	327.0
+33	331.0	333.0	339.0
+34	341.0	345.0	348.0
+35	351.0	353.0	356.0
+36	360.0	367.0	369.0
+37	373.0	376.0	379.0
+38	382.0	384.0	389.0
+39	392.0	396.0	399.0
+4	41.0	42.5	47.0
+40	400.0	403.5	409.0
+41	411.0	415.5	419.0
+42	421.0	425.5	429.0
+43	430.0	435.0	439.0
+44	443.0	446.0	449.0
+45	452.0	455.0	459.0
+46	460.0	467.5	469.0
+47	470.0	477.0	479.0
+48	480.0	484.0	489.0
+49	490.0	494.5	498.0
+5	51.0	54.0	58.0
+6	64.0	66.5	69.0
+7	70.0	73.0	78.0
+8	80.0	84.0	87.0
+9	90.0	95.0	98.0
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS INT), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS INT), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0.0	4.5	9.0
+1	10.0	15.0	19.0
+10	100.0	103.0	105.0
+11	111.0	117.0	119.0
+12	120.0	127.0	129.0
+13	131.0	137.0	138.0
+14	143.0	146.0	149.0
+15	150.0	154.0	158.0
+16	160.0	166.5	169.0
+17	170.0	175.0	179.0
+18	180.0	186.5	189.0
+19	190.0	194.5	199.0
+2	20.0	26.0	28.0
+20	200.0	205.0	209.0
+21	213.0	216.5	219.0
+22	221.0	224.0	229.0
+23	230.0	234.0	239.0
+24	241.0	244.0	249.0
+25	252.0	256.0	258.0
+26	260.0	264.0	266.0
+27	272.0	275.0	278.0
+28	280.0	283.5	289.0
+29	291.0	297.0	298.0
+3	30.0	35.0	37.0
+30	302.0	307.0	309.0
+31	310.0	316.0	318.0
+32	321.0	324.0	327.0
+33	331.0	333.0	339.0
+34	341.0	345.0	348.0
+35	351.0	353.0	356.0
+36	360.0	367.0	369.0
+37	373.0	376.0	379.0
+38	382.0	384.0	389.0
+39	392.0	396.0	399.0
+4	41.0	42.5	47.0
+40	400.0	403.5	409.0
+41	411.0	415.5	419.0
+42	421.0	425.5	429.0
+43	430.0	435.0	439.0
+44	443.0	446.0	449.0
+45	452.0	455.0	459.0
+46	460.0	467.5	469.0
+47	470.0	477.0	479.0
+48	480.0	484.0	489.0
+49	490.0	494.5	498.0
+5	51.0	54.0	58.0
+6	64.0	66.5	69.0
+7	70.0	73.0	78.0
+8	80.0	84.0	87.0
+9	90.0	95.0	98.0
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS INT), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS INT), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0.0	4.5	9.0
+1	10.0	15.0	19.0
+10	100.0	103.0	105.0
+11	111.0	117.0	119.0
+12	120.0	127.0	129.0
+13	131.0	137.0	138.0
+14	143.0	146.0	149.0
+15	150.0	154.0	158.0
+16	160.0	166.5	169.0
+17	170.0	175.0	179.0
+18	180.0	186.5	189.0
+19	190.0	194.5	199.0
+2	20.0	26.0	28.0
+20	200.0	205.0	209.0
+21	213.0	216.5	219.0
+22	221.0	224.0	229.0
+23	230.0	234.0	239.0
+24	241.0	244.0	249.0
+25	252.0	256.0	258.0
+26	260.0	264.0	266.0
+27	272.0	275.0	278.0
+28	280.0	283.5	289.0
+29	291.0	297.0	298.0
+3	30.0	35.0	37.0
+30	302.0	307.0	309.0
+31	310.0	316.0	318.0
+32	321.0	324.0	327.0
+33	331.0	333.0	339.0
+34	341.0	345.0	348.0
+35	351.0	353.0	356.0
+36	360.0	367.0	369.0
+37	373.0	376.0	379.0
+38	382.0	384.0	389.0
+39	392.0	396.0	399.0
+4	41.0	42.5	47.0
+40	400.0	403.5	409.0
+41	411.0	415.5	419.0
+42	421.0	425.5	429.0
+43	430.0	435.0	439.0
+44	443.0	446.0	449.0
+45	452.0	455.0	459.0
+46	460.0	467.5	469.0
+47	470.0	477.0	479.0
+48	480.0	484.0	489.0
+49	490.0	494.5	498.0
+5	51.0	54.0	58.0
+6	64.0	66.5	69.0
+7	70.0	73.0	78.0
+8	80.0	84.0	87.0
+9	90.0	95.0	98.0
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS INT), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_cont(CAST(substr(value, 5) AS INT), 0.5),
+       percentile_cont(CAST(substr(value, 5) AS INT), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0.0	4.5	9.0
+1	10.0	15.0	19.0
+10	100.0	103.0	105.0
+11	111.0	117.0	119.0
+12	120.0	127.0	129.0
+13	131.0	137.0	138.0
+14	143.0	146.0	149.0
+15	150.0	154.0	158.0
+16	160.0	166.5	169.0
+17	170.0	175.0	179.0
+18	180.0	186.5	189.0
+19	190.0	194.5	199.0
+2	20.0	26.0	28.0
+20	200.0	205.0	209.0
+21	213.0	216.5	219.0
+22	221.0	224.0	229.0
+23	230.0	234.0	239.0
+24	241.0	244.0	249.0
+25	252.0	256.0	258.0
+26	260.0	264.0	266.0
+27	272.0	275.0	278.0
+28	280.0	283.5	289.0
+29	291.0	297.0	298.0
+3	30.0	35.0	37.0
+30	302.0	307.0	309.0
+31	310.0	316.0	318.0
+32	321.0	324.0	327.0
+33	331.0	333.0	339.0
+34	341.0	345.0	348.0
+35	351.0	353.0	356.0
+36	360.0	367.0	369.0
+37	373.0	376.0	379.0
+38	382.0	384.0	389.0
+39	392.0	396.0	399.0
+4	41.0	42.5	47.0
+40	400.0	403.5	409.0
+41	411.0	415.5	419.0
+42	421.0	425.5	429.0
+43	430.0	435.0	439.0
+44	443.0	446.0	449.0
+45	452.0	455.0	459.0
+46	460.0	467.5	469.0
+47	470.0	477.0	479.0
+48	480.0	484.0	489.0
+49	490.0	494.5	498.0
+5	51.0	54.0	58.0
+6	64.0	66.5	69.0
+7	70.0	73.0	78.0
+8	80.0	84.0	87.0
+9	90.0	95.0	98.0
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(NULL, 0.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(NULL, 0.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	NULL
+1	NULL
+10	NULL
+11	NULL
+12	NULL
+13	NULL
+14	NULL
+15	NULL
+16	NULL
+17	NULL
+18	NULL
+19	NULL
+2	NULL
+20	NULL
+21	NULL
+22	NULL
+23	NULL
+24	NULL
+25	NULL
+26	NULL
+27	NULL
+28	NULL
+29	NULL
+3	NULL
+30	NULL
+31	NULL
+32	NULL
+33	NULL
+34	NULL
+35	NULL
+36	NULL
+37	NULL
+38	NULL
+39	NULL
+4	NULL
+40	NULL
+41	NULL
+42	NULL
+43	NULL
+44	NULL
+45	NULL
+46	NULL
+47	NULL
+48	NULL
+49	NULL
+5	NULL
+6	NULL
+7	NULL
+8	NULL
+9	NULL
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(IF(CAST(key AS INT) DIV 10 < 5, 1, NULL), 0.5)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_cont(IF(CAST(key AS INT) DIV 10 < 5, 1, NULL), 0.5)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	1.0
+1	1.0
+10	NULL
+11	NULL
+12	NULL
+13	NULL
+14	NULL
+15	NULL
+16	NULL
+17	NULL
+18	NULL
+19	NULL
+2	1.0
+20	NULL
+21	NULL
+22	NULL
+23	NULL
+24	NULL
+25	NULL
+26	NULL
+27	NULL
+28	NULL
+29	NULL
+3	1.0
+30	NULL
+31	NULL
+32	NULL
+33	NULL
+34	NULL
+35	NULL
+36	NULL
+37	NULL
+38	NULL
+39	NULL
+4	1.0
+40	NULL
+41	NULL
+42	NULL
+43	NULL
+44	NULL
+45	NULL
+46	NULL
+47	NULL
+48	NULL
+49	NULL
+5	NULL
+6	NULL
+7	NULL
+8	NULL
+9	NULL
+PREHOOK: query: select percentile_cont(cast(key as bigint), 0.5) from src where false
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select percentile_cont(cast(key as bigint), 0.5) from src where false
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+NULL
+PREHOOK: query: DESCRIBE FUNCTION percentile_disc
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION percentile_disc
+POSTHOOK: type: DESCFUNCTION
+There is no documentation for function 'percentile_disc'
+PREHOOK: query: DESCRIBE FUNCTION EXTENDED percentile_disc
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION EXTENDED percentile_disc
+POSTHOOK: type: DESCFUNCTION
+There is no documentation for function 'percentile_disc'
+Function class:org.apache.hadoop.hive.ql.udf.generic.GenericUDAFPercentileDisc
+Function type:BUILTIN
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS INT), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS INT), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0.0	4.5	9.0
+1	10.0	15.0	19.0
+10	100.0	103.0	105.0
+11	111.0	117.0	119.0
+12	120.0	127.0	129.0
+13	131.0	137.0	138.0
+14	143.0	146.0	149.0
+15	150.0	154.0	158.0
+16	160.0	166.5	169.0
+17	170.0	175.0	179.0
+18	180.0	186.5	189.0
+19	190.0	194.5	199.0
+2	20.0	26.0	28.0
+20	200.0	205.0	209.0
+21	213.0	216.5	219.0
+22	221.0	224.0	229.0
+23	230.0	234.0	239.0
+24	241.0	244.0	249.0
+25	252.0	256.0	258.0
+26	260.0	264.0	266.0
+27	272.0	275.0	278.0
+28	280.0	283.5	289.0
+29	291.0	297.0	298.0
+3	30.0	35.0	37.0
+30	302.0	307.0	309.0
+31	310.0	316.0	318.0
+32	321.0	324.0	327.0
+33	331.0	333.0	339.0
+34	341.0	345.0	348.0
+35	351.0	353.0	356.0
+36	360.0	367.0	369.0
+37	373.0	376.0	379.0
+38	382.0	384.0	389.0
+39	392.0	396.0	399.0
+4	41.0	42.5	47.0
+40	400.0	403.5	409.0
+41	411.0	415.5	419.0
+42	421.0	425.5	429.0
+43	430.0	435.0	439.0
+44	443.0	446.0	449.0
+45	452.0	455.0	459.0
+46	460.0	467.5	469.0
+47	470.0	477.0	479.0
+48	480.0	484.0	489.0
+49	490.0	494.5	498.0
+5	51.0	54.0	58.0
+6	64.0	66.5	69.0
+7	70.0	73.0	78.0
+8	80.0	84.0	87.0
+9	90.0	95.0	98.0
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS INT), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS INT), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0.0	4.5	9.0
+1	10.0	15.0	19.0
+10	100.0	103.0	105.0
+11	111.0	117.0	119.0
+12	120.0	127.0	129.0
+13	131.0	137.0	138.0
+14	143.0	146.0	149.0
+15	150.0	154.0	158.0
+16	160.0	166.5	169.0
+17	170.0	175.0	179.0
+18	180.0	186.5	189.0
+19	190.0	194.5	199.0
+2	20.0	26.0	28.0
+20	200.0	205.0	209.0
+21	213.0	216.5	219.0
+22	221.0	224.0	229.0
+23	230.0	234.0	239.0
+24	241.0	244.0	249.0
+25	252.0	256.0	258.0
+26	260.0	264.0	266.0
+27	272.0	275.0	278.0
+28	280.0	283.5	289.0
+29	291.0	297.0	298.0
+3	30.0	35.0	37.0
+30	302.0	307.0	309.0
+31	310.0	316.0	318.0
+32	321.0	324.0	327.0
+33	331.0	333.0	339.0
+34	341.0	345.0	348.0
+35	351.0	353.0	356.0
+36	360.0	367.0	369.0
+37	373.0	376.0	379.0
+38	382.0	384.0	389.0
+39	392.0	396.0	399.0
+4	41.0	42.5	47.0
+40	400.0	403.5	409.0
+41	411.0	415.5	419.0
+42	421.0	425.5	429.0
+43	430.0	435.0	439.0
+44	443.0	446.0	449.0
+45	452.0	455.0	459.0
+46	460.0	467.5	469.0
+47	470.0	477.0	479.0
+48	480.0	484.0	489.0
+49	490.0	494.5	498.0
+5	51.0	54.0	58.0
+6	64.0	66.5	69.0
+7	70.0	73.0	78.0
+8	80.0	84.0	87.0
+9	90.0	95.0	98.0
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS INT), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS INT), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0.0	4.5	9.0
+1	10.0	15.0	19.0
+10	100.0	103.0	105.0
+11	111.0	117.0	119.0
+12	120.0	127.0	129.0
+13	131.0	137.0	138.0
+14	143.0	146.0	149.0
+15	150.0	154.0	158.0
+16	160.0	166.5	169.0
+17	170.0	175.0	179.0
+18	180.0	186.5	189.0
+19	190.0	194.5	199.0
+2	20.0	26.0	28.0
+20	200.0	205.0	209.0
+21	213.0	216.5	219.0
+22	221.0	224.0	229.0
+23	230.0	234.0	239.0
+24	241.0	244.0	249.0
+25	252.0	256.0	258.0
+26	260.0	264.0	266.0
+27	272.0	275.0	278.0
+28	280.0	283.5	289.0
+29	291.0	297.0	298.0
+3	30.0	35.0	37.0
+30	302.0	307.0	309.0
+31	310.0	316.0	318.0
+32	321.0	324.0	327.0
+33	331.0	333.0	339.0
+34	341.0	345.0	348.0
+35	351.0	353.0	356.0
+36	360.0	367.0	369.0
+37	373.0	376.0	379.0
+38	382.0	384.0	389.0
+39	392.0	396.0	399.0
+4	41.0	42.5	47.0
+40	400.0	403.5	409.0
+41	411.0	415.5	419.0
+42	421.0	425.5	429.0
+43	430.0	435.0	439.0
+44	443.0	446.0	449.0
+45	452.0	455.0	459.0
+46	460.0	467.5	469.0
+47	470.0	477.0	479.0
+48	480.0	484.0	489.0
+49	490.0	494.5	498.0
+5	51.0	54.0	58.0
+6	64.0	66.5	69.0
+7	70.0	73.0	78.0
+8	80.0	84.0	87.0
+9	90.0	95.0	98.0
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS INT), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS INT), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0.0	4.5	9.0
+1	10.0	15.0	19.0
+10	100.0	103.0	105.0
+11	111.0	117.0	119.0
+12	120.0	127.0	129.0
+13	131.0	137.0	138.0
+14	143.0	146.0	149.0
+15	150.0	154.0	158.0
+16	160.0	166.5	169.0
+17	170.0	175.0	179.0
+18	180.0	186.5	189.0
+19	190.0	194.5	199.0
+2	20.0	26.0	28.0
+20	200.0	205.0	209.0
+21	213.0	216.5	219.0
+22	221.0	224.0	229.0
+23	230.0	234.0	239.0
+24	241.0	244.0	249.0
+25	252.0	256.0	258.0
+26	260.0	264.0	266.0
+27	272.0	275.0	278.0
+28	280.0	283.5	289.0
+29	291.0	297.0	298.0
+3	30.0	35.0	37.0
+30	302.0	307.0	309.0
+31	310.0	316.0	318.0
+32	321.0	324.0	327.0
+33	331.0	333.0	339.0
+34	341.0	345.0	348.0
+35	351.0	353.0	356.0
+36	360.0	367.0	369.0
+37	373.0	376.0	379.0
+38	382.0	384.0	389.0
+39	392.0	396.0	399.0
+4	41.0	42.5	47.0
+40	400.0	403.5	409.0
+41	411.0	415.5	419.0
+42	421.0	425.5	429.0
+43	430.0	435.0	439.0
+44	443.0	446.0	449.0
+45	452.0	455.0	459.0
+46	460.0	467.5	469.0
+47	470.0	477.0	479.0
+48	480.0	484.0	489.0
+49	490.0	494.5	498.0
+5	51.0	54.0	58.0
+6	64.0	66.5	69.0
+7	70.0	73.0	78.0
+8	80.0	84.0	87.0
+9	90.0	95.0	98.0
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(NULL, 0.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(NULL, 0.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	NULL
+1	NULL
+10	NULL
+11	NULL
+12	NULL
+13	NULL
+14	NULL
+15	NULL
+16	NULL
+17	NULL
+18	NULL
+19	NULL
+2	NULL
+20	NULL
+21	NULL
+22	NULL
+23	NULL
+24	NULL
+25	NULL
+26	NULL
+27	NULL
+28	NULL
+29	NULL
+3	NULL
+30	NULL
+31	NULL
+32	NULL
+33	NULL
+34	NULL
+35	NULL
+36	NULL
+37	NULL
+38	NULL
+39	NULL
+4	NULL
+40	NULL
+41	NULL
+42	NULL
+43	NULL
+44	NULL
+45	NULL
+46	NULL
+47	NULL
+48	NULL
+49	NULL
+5	NULL
+6	NULL
+7	NULL
+8	NULL
+9	NULL
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(IF(CAST(key AS INT) DIV 10 < 5, 1, NULL), 0.5)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(IF(CAST(key AS INT) DIV 10 < 5, 1, NULL), 0.5)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	1.0
+1	1.0
+10	NULL
+11	NULL
+12	NULL
+13	NULL
+14	NULL
+15	NULL
+16	NULL
+17	NULL
+18	NULL
+19	NULL
+2	1.0
+20	NULL
+21	NULL
+22	NULL
+23	NULL
+24	NULL
+25	NULL
+26	NULL
+27	NULL
+28	NULL
+29	NULL
+3	1.0
+30	NULL
+31	NULL
+32	NULL
+33	NULL
+34	NULL
+35	NULL
+36	NULL
+37	NULL
+38	NULL
+39	NULL
+4	1.0
+40	NULL
+41	NULL
+42	NULL
+43	NULL
+44	NULL
+45	NULL
+46	NULL
+47	NULL
+48	NULL
+49	NULL
+5	NULL
+6	NULL
+7	NULL
+8	NULL
+9	NULL
+PREHOOK: query: select percentile_disc(cast(key as bigint), 0.5) from src where false
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select percentile_disc(cast(key as bigint), 0.5) from src where false
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+NULL
diff --git a/ql/src/test/results/clientpositive/udaf_percentile_disc.q.out b/ql/src/test/results/clientpositive/udaf_percentile_disc.q.out
new file mode 100644
index 0000000..75fa36c
--- /dev/null
+++ b/ql/src/test/results/clientpositive/udaf_percentile_disc.q.out
@@ -0,0 +1,421 @@
+PREHOOK: query: DESCRIBE FUNCTION percentile_disc
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION percentile_disc
+POSTHOOK: type: DESCFUNCTION
+percentile_disc(input, pc) - Returns the percentile of expr at pc (range: [0,1]) without interpolation.
+PREHOOK: query: DESCRIBE FUNCTION EXTENDED percentile_disc
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION EXTENDED percentile_disc
+POSTHOOK: type: DESCFUNCTION
+percentile_disc(input, pc) - Returns the percentile of expr at pc (range: [0,1]) without interpolation.
+Function class:org.apache.hadoop.hive.ql.udf.generic.GenericUDAFPercentileDisc
+Function type:BUILTIN
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0.0	5.0	9.0
+1	10.0	15.0	19.0
+10	100.0	103.0	105.0
+11	111.0	118.0	119.0
+12	120.0	128.0	129.0
+13	131.0	137.0	138.0
+14	143.0	146.0	149.0
+15	150.0	155.0	158.0
+16	160.0	167.0	169.0
+17	170.0	175.0	179.0
+18	180.0	187.0	189.0
+19	190.0	195.0	199.0
+2	20.0	26.0	28.0
+20	200.0	205.0	209.0
+21	213.0	217.0	219.0
+22	221.0	224.0	229.0
+23	230.0	235.0	239.0
+24	241.0	244.0	249.0
+25	252.0	256.0	258.0
+26	260.0	265.0	266.0
+27	272.0	275.0	278.0
+28	280.0	284.0	289.0
+29	291.0	298.0	298.0
+3	30.0	35.0	37.0
+30	302.0	307.0	309.0
+31	310.0	316.0	318.0
+32	321.0	325.0	327.0
+33	331.0	333.0	339.0
+34	341.0	345.0	348.0
+35	351.0	353.0	356.0
+36	360.0	367.0	369.0
+37	373.0	377.0	379.0
+38	382.0	384.0	389.0
+39	392.0	396.0	399.0
+4	41.0	43.0	47.0
+40	400.0	404.0	409.0
+41	411.0	417.0	419.0
+42	421.0	427.0	429.0
+43	430.0	435.0	439.0
+44	443.0	446.0	449.0
+45	452.0	455.0	459.0
+46	460.0	468.0	469.0
+47	470.0	477.0	479.0
+48	480.0	484.0	489.0
+49	490.0	495.0	498.0
+5	51.0	54.0	58.0
+6	64.0	67.0	69.0
+7	70.0	74.0	78.0
+8	80.0	84.0	87.0
+9	90.0	95.0	98.0
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0.0	5.0	9.0
+1	10.0	15.0	19.0
+10	100.0	103.0	105.0
+11	111.0	118.0	119.0
+12	120.0	128.0	129.0
+13	131.0	137.0	138.0
+14	143.0	146.0	149.0
+15	150.0	155.0	158.0
+16	160.0	167.0	169.0
+17	170.0	175.0	179.0
+18	180.0	187.0	189.0
+19	190.0	195.0	199.0
+2	20.0	26.0	28.0
+20	200.0	205.0	209.0
+21	213.0	217.0	219.0
+22	221.0	224.0	229.0
+23	230.0	235.0	239.0
+24	241.0	244.0	249.0
+25	252.0	256.0	258.0
+26	260.0	265.0	266.0
+27	272.0	275.0	278.0
+28	280.0	284.0	289.0
+29	291.0	298.0	298.0
+3	30.0	35.0	37.0
+30	302.0	307.0	309.0
+31	310.0	316.0	318.0
+32	321.0	325.0	327.0
+33	331.0	333.0	339.0
+34	341.0	345.0	348.0
+35	351.0	353.0	356.0
+36	360.0	367.0	369.0
+37	373.0	377.0	379.0
+38	382.0	384.0	389.0
+39	392.0	396.0	399.0
+4	41.0	43.0	47.0
+40	400.0	404.0	409.0
+41	411.0	417.0	419.0
+42	421.0	427.0	429.0
+43	430.0	435.0	439.0
+44	443.0	446.0	449.0
+45	452.0	455.0	459.0
+46	460.0	468.0	469.0
+47	470.0	477.0	479.0
+48	480.0	484.0	489.0
+49	490.0	495.0	498.0
+5	51.0	54.0	58.0
+6	64.0	67.0	69.0
+7	70.0	74.0	78.0
+8	80.0	84.0	87.0
+9	90.0	95.0	98.0
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0.0	5.0	9.0
+1	10.0	15.0	19.0
+10	100.0	103.0	105.0
+11	111.0	118.0	119.0
+12	120.0	128.0	129.0
+13	131.0	137.0	138.0
+14	143.0	146.0	149.0
+15	150.0	155.0	158.0
+16	160.0	167.0	169.0
+17	170.0	175.0	179.0
+18	180.0	187.0	189.0
+19	190.0	195.0	199.0
+2	20.0	26.0	28.0
+20	200.0	205.0	209.0
+21	213.0	217.0	219.0
+22	221.0	224.0	229.0
+23	230.0	235.0	239.0
+24	241.0	244.0	249.0
+25	252.0	256.0	258.0
+26	260.0	265.0	266.0
+27	272.0	275.0	278.0
+28	280.0	284.0	289.0
+29	291.0	298.0	298.0
+3	30.0	35.0	37.0
+30	302.0	307.0	309.0
+31	310.0	316.0	318.0
+32	321.0	325.0	327.0
+33	331.0	333.0	339.0
+34	341.0	345.0	348.0
+35	351.0	353.0	356.0
+36	360.0	367.0	369.0
+37	373.0	377.0	379.0
+38	382.0	384.0	389.0
+39	392.0	396.0	399.0
+4	41.0	43.0	47.0
+40	400.0	404.0	409.0
+41	411.0	417.0	419.0
+42	421.0	427.0	429.0
+43	430.0	435.0	439.0
+44	443.0	446.0	449.0
+45	452.0	455.0	459.0
+46	460.0	468.0	469.0
+47	470.0	477.0	479.0
+48	480.0	484.0	489.0
+49	490.0	495.0	498.0
+5	51.0	54.0	58.0
+6	64.0	67.0	69.0
+7	70.0	74.0	78.0
+8	80.0	84.0	87.0
+9	90.0	95.0	98.0
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(CAST(substr(value, 5) AS INT), 0.0),
+       percentile_disc(CAST(substr(value, 5) AS DOUBLE), 0.5),
+       percentile_disc(CAST(substr(value, 5) AS DECIMAL), 1.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0.0	5.0	9.0
+1	10.0	15.0	19.0
+10	100.0	103.0	105.0
+11	111.0	118.0	119.0
+12	120.0	128.0	129.0
+13	131.0	137.0	138.0
+14	143.0	146.0	149.0
+15	150.0	155.0	158.0
+16	160.0	167.0	169.0
+17	170.0	175.0	179.0
+18	180.0	187.0	189.0
+19	190.0	195.0	199.0
+2	20.0	26.0	28.0
+20	200.0	205.0	209.0
+21	213.0	217.0	219.0
+22	221.0	224.0	229.0
+23	230.0	235.0	239.0
+24	241.0	244.0	249.0
+25	252.0	256.0	258.0
+26	260.0	265.0	266.0
+27	272.0	275.0	278.0
+28	280.0	284.0	289.0
+29	291.0	298.0	298.0
+3	30.0	35.0	37.0
+30	302.0	307.0	309.0
+31	310.0	316.0	318.0
+32	321.0	325.0	327.0
+33	331.0	333.0	339.0
+34	341.0	345.0	348.0
+35	351.0	353.0	356.0
+36	360.0	367.0	369.0
+37	373.0	377.0	379.0
+38	382.0	384.0	389.0
+39	392.0	396.0	399.0
+4	41.0	43.0	47.0
+40	400.0	404.0	409.0
+41	411.0	417.0	419.0
+42	421.0	427.0	429.0
+43	430.0	435.0	439.0
+44	443.0	446.0	449.0
+45	452.0	455.0	459.0
+46	460.0	468.0	469.0
+47	470.0	477.0	479.0
+48	480.0	484.0	489.0
+49	490.0	495.0	498.0
+5	51.0	54.0	58.0
+6	64.0	67.0	69.0
+7	70.0	74.0	78.0
+8	80.0	84.0	87.0
+9	90.0	95.0	98.0
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(NULL, 0.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(NULL, 0.0)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	NULL
+1	NULL
+10	NULL
+11	NULL
+12	NULL
+13	NULL
+14	NULL
+15	NULL
+16	NULL
+17	NULL
+18	NULL
+19	NULL
+2	NULL
+20	NULL
+21	NULL
+22	NULL
+23	NULL
+24	NULL
+25	NULL
+26	NULL
+27	NULL
+28	NULL
+29	NULL
+3	NULL
+30	NULL
+31	NULL
+32	NULL
+33	NULL
+34	NULL
+35	NULL
+36	NULL
+37	NULL
+38	NULL
+39	NULL
+4	NULL
+40	NULL
+41	NULL
+42	NULL
+43	NULL
+44	NULL
+45	NULL
+46	NULL
+47	NULL
+48	NULL
+49	NULL
+5	NULL
+6	NULL
+7	NULL
+8	NULL
+9	NULL
+PREHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(IF(CAST(key AS INT) DIV 10 < 5, 1, NULL), 0.5)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CAST(key AS INT) DIV 10,
+       percentile_disc(IF(CAST(key AS INT) DIV 10 < 5, 1, NULL), 0.5)
+FROM src
+GROUP BY CAST(key AS INT) DIV 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	1.0
+1	1.0
+10	NULL
+11	NULL
+12	NULL
+13	NULL
+14	NULL
+15	NULL
+16	NULL
+17	NULL
+18	NULL
+19	NULL
+2	1.0
+20	NULL
+21	NULL
+22	NULL
+23	NULL
+24	NULL
+25	NULL
+26	NULL
+27	NULL
+28	NULL
+29	NULL
+3	1.0
+30	NULL
+31	NULL
+32	NULL
+33	NULL
+34	NULL
+35	NULL
+36	NULL
+37	NULL
+38	NULL
+39	NULL
+4	1.0
+40	NULL
+41	NULL
+42	NULL
+43	NULL
+44	NULL
+45	NULL
+46	NULL
+47	NULL
+48	NULL
+49	NULL
+5	NULL
+6	NULL
+7	NULL
+8	NULL
+9	NULL
+PREHOOK: query: select percentile_disc(cast(key as bigint), 0.5) from src where false
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select percentile_disc(cast(key as bigint), 0.5) from src where false
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+NULL


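For reference, the behavioural difference between the two new aggregates can be read directly off the q.out results above: percentile_cont interpolates between neighboring values, while percentile_disc returns an actual value from the group ("without interpolation", per its DESCRIBE output). A minimal side-by-side sketch — the table and column names here are illustrative, not part of the patch — using the ten values 0..9 of group 0 above, where the expected results match the output shown:

SELECT percentile_cont(val, 0.5),  -- 4.5 (interpolated between 4 and 5)
       percentile_disc(val, 0.5)   -- 5.0 (an actual value from the group)
FROM t;
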
[hive] 02/02: HIVE-15406: Consider vectorizing the new trunc function (Laszlo Bodor via Zoltan Haindrich)

Posted by kg...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kgyrtkirk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git

commit a7a9f516b3803830999f60d2bffb45e76c8a91ef
Author: Laszlo Bodor <bo...@gmail.com>
AuthorDate: Wed Mar 27 09:24:54 2019 +0100

    HIVE-15406: Consider vectorizing the new trunc function (Laszlo Bodor via Zoltan Haindrich)
    
    Signed-off-by: Zoltan Haindrich <ki...@rxd.hu>
---
 .../exec/vector/expressions/TruncDateFromDate.java |   39 +
 .../vector/expressions/TruncDateFromString.java    |   51 +
 .../vector/expressions/TruncDateFromTimestamp.java |  164 +++
 .../ql/exec/vector/expressions/TruncDecimal.java   |   54 +
 .../vector/expressions/TruncDecimalNoScale.java    |   29 +
 .../ql/exec/vector/expressions/TruncFloat.java     |  149 +++
 .../exec/vector/expressions/TruncFloatNoScale.java |   29 +
 .../hive/ql/udf/generic/GenericUDFTrunc.java       |   13 +-
 .../test/queries/clientpositive/vector_udf_trunc.q |  110 ++
 .../results/clientpositive/vector_udf_trunc.q.out  | 1343 ++++++++++++++++++++
 10 files changed, 1979 insertions(+), 2 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromDate.java
new file mode 100644
index 0000000..e560de2
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromDate.java
@@ -0,0 +1,39 @@
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.ArgumentType;
+
+/**
+ * Vectorized implementation of trunc(date, fmt) function for date input
+ */
+public class TruncDateFromDate extends TruncDateFromTimestamp {
+  private transient Date date = new Date();
+
+  public TruncDateFromDate(int colNum, byte[] fmt, int outputColumnNum) {
+    super(colNum, fmt, outputColumnNum);
+  }
+
+  private static final long serialVersionUID = 1L;
+
+  public TruncDateFromDate() {
+    super();
+  }
+
+  protected void truncDate(ColumnVector inV, BytesColumnVector outV, int i) {
+    truncDate((LongColumnVector) inV, outV, i);
+  }
+
+  protected void truncDate(LongColumnVector inV, BytesColumnVector outV, int i) {
+    date = Date.ofEpochMilli(inV.vector[i]);
+    processDate(outV, i, date);
+  }
+
+  @Override
+  protected ArgumentType getInputColumnType() {
+    return VectorExpressionDescriptor.ArgumentType.DATE;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromString.java
new file mode 100644
index 0000000..0a0c65b
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromString.java
@@ -0,0 +1,51 @@
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.nio.charset.StandardCharsets;
+
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.ArgumentType;
+
+/**
+ * Vectorized implementation of trunc(date, fmt) function for string input
+ */
+public class TruncDateFromString extends TruncDateFromTimestamp {
+  private transient Date date = new Date();
+
+  public TruncDateFromString(int colNum, byte[] fmt, int outputColumnNum) {
+    super(colNum, fmt, outputColumnNum);
+  }
+
+  private static final long serialVersionUID = 1L;
+
+  public TruncDateFromString() {
+    super();
+  }
+
+  protected void truncDate(ColumnVector inV, BytesColumnVector outV, int i) {
+    truncDate((BytesColumnVector) inV, outV, i);
+  }
+
+  protected void truncDate(BytesColumnVector inV, BytesColumnVector outV, int i) {
+    if (inV.vector[i] == null) {
+      outV.isNull[i] = true;
+      outV.noNulls = false;
+      // The byte array is null: mark the output null and return before dereferencing it below.
+      return;
+    }
+
+    String dateString =
+        new String(inV.vector[i], inV.start[i], inV.length[i], StandardCharsets.UTF_8);
+    if (dateParser.parseDate(dateString, date)) {
+      processDate(outV, i, date);
+    } else {
+      outV.isNull[i] = true;
+      outV.noNulls = false;
+    }
+  }
+
+  @Override
+  protected ArgumentType getInputColumnType() {
+    return VectorExpressionDescriptor.ArgumentType.STRING_FAMILY;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromTimestamp.java
new file mode 100644
index 0000000..1a1f146
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDateFromTimestamp.java
@@ -0,0 +1,164 @@
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.ArgumentType;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.common.util.DateParser;
+
+/**
+ * Vectorized implementation of trunc(date, fmt) function for timestamp input
+ */
+public class TruncDateFromTimestamp extends VectorExpression {
+  private static final long serialVersionUID = 1L;
+  protected int colNum;
+  protected String fmt;
+  protected transient final DateParser dateParser = new DateParser();
+
+  public TruncDateFromTimestamp() {
+    super();
+    colNum = -1;
+  }
+
+  public TruncDateFromTimestamp(int colNum, byte[] fmt, int outputColumnNum) {
+    super(outputColumnNum);
+    this.colNum = colNum;
+    this.fmt = new String(fmt, StandardCharsets.UTF_8);
+  }
+
+  @Override
+  public String vectorExpressionParameters() {
+    return "col " + colNum + ", format " + fmt;
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) throws HiveException {
+
+    if (childExpressions != null) {
+      this.evaluateChildren(batch);
+    }
+
+    ColumnVector inputColVector = batch.cols[colNum];
+    BytesColumnVector outputColVector = (BytesColumnVector) batch.cols[outputColumnNum];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    int n = batch.size;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    // We do not need to do a column reset since we are carefully changing the output.
+    outputColVector.isRepeating = false;
+
+    if (inputColVector.isRepeating) {
+      if (inputColVector.noNulls || !inputIsNull[0]) {
+        outputIsNull[0] = false;
+        truncDate(inputColVector, outputColVector, 0);
+      } else {
+        outputIsNull[0] = true;
+        outputColVector.noNulls = false;
+      }
+      outputColVector.isRepeating = true;
+      return;
+    }
+
+    if (inputColVector.noNulls) {
+      if (batch.selectedInUse) {
+
+        // CONSIDER: For large n, fill n or all of isNull array and use the tighter ELSE loop.
+        if (!outputColVector.noNulls) {
+          for (int j = 0; j != n; j++) {
+            final int i = sel[j];
+            // Set isNull before the call in case it changes its mind.
+            outputIsNull[i] = false;
+            truncDate(inputColVector, outputColVector, i);
+          }
+        } else {
+          for (int j = 0; j != n; j++) {
+            final int i = sel[j];
+            truncDate(inputColVector, outputColVector, i);
+          }
+        }
+      } else {
+        if (!outputColVector.noNulls) {
+
+          // Assume it is almost always a performance win to fill all of isNull so we can
+          // safely reset noNulls.
+          Arrays.fill(outputIsNull, false);
+          outputColVector.noNulls = true;
+        }
+        for (int i = 0; i != n; i++) {
+          truncDate(inputColVector, outputColVector, i);
+        }
+      }
+    } else /* there are nulls in the inputColVector */ {
+
+      // Carefully handle NULLs...
+      outputColVector.noNulls = false;
+
+      if (batch.selectedInUse) {
+        for (int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputIsNull[i] = inputIsNull[i];
+          truncDate(inputColVector, outputColVector, i);
+        }
+      } else {
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+        for (int i = 0; i != n; i++) {
+          if (!inputColVector.isNull[i]) {
+            truncDate(inputColVector, outputColVector, i);
+          }
+        }
+      }
+    }
+  }
+
+  protected void truncDate(ColumnVector inV, BytesColumnVector outV, int i) {
+    Date date = Date.ofEpochMilli(((TimestampColumnVector) inV).getTime(i));
+    processDate(outV, i, date);
+  }
+
+  protected void processDate(BytesColumnVector outV, int i, Date date) {
+    if ("MONTH".equals(fmt) || "MON".equals(fmt) || "MM".equals(fmt)) {
+      date.setDayOfMonth(1);
+    } else if ("QUARTER".equals(fmt) || "Q".equals(fmt)) {
+      int month = date.getMonth() - 1;
+      int quarter = month / 3;
+      int monthToSet = quarter * 3 + 1;
+      date.setMonth(monthToSet);
+      date.setDayOfMonth(1);
+    } else if ("YEAR".equals(fmt) || "YYYY".equals(fmt) || "YY".equals(fmt)) {
+      date.setMonth(1);
+      date.setDayOfMonth(1);
+    }
+    byte[] bytes = date.toString().getBytes(StandardCharsets.UTF_8);
+    outV.setVal(i, bytes, 0, bytes.length);
+  }
+
+  @Override
+  public Descriptor getDescriptor() {
+    VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder();
+    b.setMode(VectorExpressionDescriptor.Mode.PROJECTION).setNumArguments(2)
+        .setArgumentTypes(getInputColumnType(),
+            VectorExpressionDescriptor.ArgumentType.STRING_FAMILY)
+        .setInputExpressionTypes(VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR);
+    return b.build();
+  }
+
+  protected ArgumentType getInputColumnType() {
+    return VectorExpressionDescriptor.ArgumentType.TIMESTAMP;
+  }
+}
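The processDate method above is what maps each supported format group to the first day of the month, quarter, or year. A minimal sketch of the corresponding query behaviour, reusing the literals from the GenericUDFTrunc description further down in this patch (the bare SELECT is illustrative only):

SELECT trunc('2009-02-12', 'MM'),    -- '2009-02-01' : day of month set to 1
       trunc('2017-03-15', 'Q'),     -- '2017-01-01' : month set to the first month of the quarter, day to 1
       trunc('2017-03-15', 'YEAR');  -- '2017-01-01' : month and day both set to 1
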
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDecimal.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDecimal.java
new file mode 100644
index 0000000..7b25cc6
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDecimal.java
@@ -0,0 +1,54 @@
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.ArgumentType;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+
+/**
+ * Vectorized implementation of trunc(number, scale) function for decimal input
+ */
+public class TruncDecimal extends TruncFloat {
+  private static final long serialVersionUID = 1L;
+
+  public TruncDecimal() {
+    super();
+  }
+
+  public TruncDecimal(int colNum, int scale, int outputColumnNum) {
+    super(colNum, scale, outputColumnNum);
+  }
+
+  @Override
+  protected void trunc(ColumnVector inputColVector, ColumnVector outputColVector, int i) {
+    HiveDecimal input = ((DecimalColumnVector) inputColVector).vector[i].getHiveDecimal();
+
+    HiveDecimal output = trunc(input);
+    ((DecimalColumnVector) outputColVector).vector[i] = new HiveDecimalWritable(output);
+  }
+
+  protected HiveDecimal trunc(HiveDecimal input) {
+    HiveDecimal pow = HiveDecimal.create(Math.pow(10, Math.abs(scale)));
+
+    if (scale >= 0) {
+      if (scale != 0) {
+        long longValue = input.multiply(pow).longValue();
+        return HiveDecimal.create(longValue).divide(pow);
+      } else {
+        return HiveDecimal.create(input.longValue());
+      }
+    } else {
+      long longValue2 = input.divide(pow).longValue();
+      return HiveDecimal.create(longValue2).multiply(pow);
+    }
+  }
+
+  protected ArgumentType getInputColumnType() {
+    return VectorExpressionDescriptor.ArgumentType.DECIMAL;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDecimalNoScale.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDecimalNoScale.java
new file mode 100644
index 0000000..c427db8
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncDecimalNoScale.java
@@ -0,0 +1,29 @@
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
+
+/**
+ * Vectorized implementation of trunc(number) function for decimal input
+ */
+public class TruncDecimalNoScale extends TruncDecimal {
+  private static final long serialVersionUID = 1L;
+
+  public TruncDecimalNoScale() {
+    super();
+    colNum = -1;
+  }
+
+  public TruncDecimalNoScale(int colNum, int outputColumnNum) {
+    super(colNum, 0, outputColumnNum);
+  }
+
+  @Override
+  public Descriptor getDescriptor() {
+    VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder();
+    b.setMode(VectorExpressionDescriptor.Mode.PROJECTION).setNumArguments(1)
+        .setArgumentTypes(getInputColumnType())
+        .setInputExpressionTypes(VectorExpressionDescriptor.InputExpressionType.COLUMN);
+    return b.build();
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncFloat.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncFloat.java
new file mode 100644
index 0000000..93f175b
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncFloat.java
@@ -0,0 +1,149 @@
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.math.BigDecimal;
+import java.util.Arrays;
+
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.ArgumentType;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Vectorized implementation of trunc(number, scale) function for float/double input
+ */
+public class TruncFloat extends VectorExpression {
+  private static final long serialVersionUID = 1L;
+  protected int colNum;
+  protected int scale;
+
+  public TruncFloat() {
+    super();
+    colNum = -1;
+  }
+
+  public TruncFloat(int colNum, int scale, int outputColumnNum) {
+    super(outputColumnNum);
+    this.colNum = colNum;
+    this.scale = scale;
+  }
+
+  @Override
+  public String vectorExpressionParameters() {
+    return "col " + colNum + ", scale " + scale;
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) throws HiveException {
+    if (childExpressions != null) {
+      this.evaluateChildren(batch);
+    }
+
+    ColumnVector inputColVector = batch.cols[colNum];
+    ColumnVector outputColVector = batch.cols[outputColumnNum];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    int n = batch.size;
+
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating = false;
+
+    if (inputColVector.isRepeating) {
+      if (inputColVector.noNulls || !inputIsNull[0]) {
+        outputIsNull[0] = false;
+        trunc(inputColVector, outputColVector, 0);
+      } else {
+        outputIsNull[0] = true;
+        outputColVector.noNulls = false;
+      }
+      outputColVector.isRepeating = true;
+      return;
+    }
+
+    if (inputColVector.noNulls) {
+      if (batch.selectedInUse) {
+        if (!outputColVector.noNulls) {
+          for (int j = 0; j != n; j++) {
+            final int i = sel[j];
+            outputIsNull[i] = false;
+            trunc(inputColVector, outputColVector, i);
+          }
+        } else {
+          for (int j = 0; j != n; j++) {
+            final int i = sel[j];
+            trunc(inputColVector, outputColVector, i);
+          }
+        }
+      } else {
+        if (!outputColVector.noNulls) {
+          Arrays.fill(outputIsNull, false);
+          outputColVector.noNulls = true;
+        }
+        for (int i = 0; i != n; i++) {
+          trunc(inputColVector, outputColVector, i);
+        }
+      }
+    } else {
+      outputColVector.noNulls = false;
+
+      if (batch.selectedInUse) {
+        for (int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputIsNull[i] = inputIsNull[i];
+          trunc(inputColVector, outputColVector, i);
+        }
+      } else {
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+        for (int i = 0; i != n; i++) {
+          if (!inputColVector.isNull[i]) {
+            trunc(inputColVector, outputColVector, i);
+          }
+        }
+      }
+    }
+  }
+
+  protected void trunc(ColumnVector inputColVector, ColumnVector outputColVector, int i) {
+    BigDecimal input = BigDecimal.valueOf(((DoubleColumnVector) inputColVector).vector[i]);
+
+    double output = trunc(input).doubleValue();
+    ((DoubleColumnVector) outputColVector).vector[i] = output;
+  }
+
+  protected BigDecimal trunc(BigDecimal input) {
+    BigDecimal pow = BigDecimal.valueOf(Math.pow(10, Math.abs(scale)));
+
+    if (scale >= 0) {
+      if (scale != 0) {
+        long longValue = input.multiply(pow).longValue();
+        return BigDecimal.valueOf(longValue).divide(pow);
+      } else {
+        return BigDecimal.valueOf(input.longValue());
+      }
+    } else {
+      long longValue2 = input.divide(pow).longValue();
+      return BigDecimal.valueOf(longValue2).multiply(pow);
+    }
+  }
+
+  @Override
+  public Descriptor getDescriptor() {
+    VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder();
+    b.setMode(VectorExpressionDescriptor.Mode.PROJECTION).setNumArguments(2)
+        .setArgumentTypes(getInputColumnType(), VectorExpressionDescriptor.ArgumentType.INT_FAMILY)
+        .setInputExpressionTypes(VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR);
+    return b.build();
+  }
+
+  protected ArgumentType getInputColumnType() {
+    return VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncFloatNoScale.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncFloatNoScale.java
new file mode 100644
index 0000000..1309428
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncFloatNoScale.java
@@ -0,0 +1,29 @@
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
+
+/**
+ * Vectorized implementation of trunc(number) function for float/double input
+ */
+public class TruncFloatNoScale extends TruncFloat {
+  private static final long serialVersionUID = 1L;
+
+  public TruncFloatNoScale() {
+    super();
+    colNum = -1;
+  }
+
+  public TruncFloatNoScale(int colNum, int outputColumnNum) {
+    super(colNum, 0, outputColumnNum);
+  }
+
+  @Override
+  public Descriptor getDescriptor() {
+    VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder();
+    b.setMode(VectorExpressionDescriptor.Mode.PROJECTION).setNumArguments(1)
+        .setArgumentTypes(getInputColumnType())
+        .setInputExpressionTypes(VectorExpressionDescriptor.InputExpressionType.COLUMN);
+    return b.build();
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
index 7a7d13e..c56d0f2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
@@ -28,6 +28,14 @@ import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.TruncDateFromDate;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.TruncDateFromString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.TruncDateFromTimestamp;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.TruncDecimal;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.TruncDecimalNoScale;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.TruncFloat;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.TruncFloatNoScale;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritableV2;
@@ -62,8 +70,7 @@ import org.apache.hadoop.io.Text;
     + "to the unit specified by the format model fmt. If you omit fmt, then date is truncated to "
     + "the nearest day. It currently only supports 'MONTH'/'MON'/'MM', 'QUARTER'/'Q' and 'YEAR'/'YYYY'/'YY' as format."
     + "If input is a number group returns N truncated to D decimal places. If D is omitted, then N is truncated to 0 places."
-    + "D can be negative to truncate (make zero) D digits left of the decimal point."
-    , extended = "date is a string in the format 'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'."
+    + "D can be negative to truncate (make zero) D digits left of the decimal point.", extended = "date is a string in the format 'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'."
         + " The time part of date is ignored.\n" + "Example:\n "
         + " > SELECT _FUNC_('2009-02-12', 'MM');\n" + "OK\n" + " '2009-02-01'" + "\n"
         + " > SELECT _FUNC_('2017-03-15', 'Q');\n" + "OK\n" + " '2017-01-01'" + "\n"
@@ -72,6 +79,8 @@ import org.apache.hadoop.io.Text;
         + " > SELECT _FUNC_(1234567891.1234567891,-4);\n" + "OK\n" + " 1234560000"
         + " > SELECT _FUNC_(1234567891.1234567891,0);\n" + "OK\n" + " 1234567891" + "\n"
         + " > SELECT _FUNC_(1234567891.1234567891);\n" + "OK\n" + " 1234567891")
+@VectorizedExpressions({ TruncDateFromTimestamp.class, TruncDateFromString.class,
+    TruncDateFromDate.class, TruncFloat.class, TruncFloatNoScale.class, TruncDecimal.class, TruncDecimalNoScale.class})
 public class GenericUDFTrunc extends GenericUDF {
 
   private transient TimestampConverter timestampConverter;
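The extended description above also documents the numeric form of trunc that the TruncFloat/TruncDecimal expressions vectorize: N is truncated to D decimal places, D defaults to 0, and a negative D zeroes out digits to the left of the decimal point. Restated as a single query using the literals from that description (the expected values in the comments are taken from the description, not recomputed):

SELECT trunc(1234567891.1234567891, -4),  -- 1234560000
       trunc(1234567891.1234567891, 0),   -- 1234567891
       trunc(1234567891.1234567891);      -- 1234567891 (D omitted, defaults to 0)
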
diff --git a/ql/src/test/queries/clientpositive/vector_udf_trunc.q b/ql/src/test/queries/clientpositive/vector_udf_trunc.q
new file mode 100644
index 0000000..51ed109
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/vector_udf_trunc.q
@@ -0,0 +1,110 @@
+--! qt:dataset:alltypesorc
+set hive.fetch.task.conversion=none;
+set hive.vectorized.execution.enabled=true;
+
+DESCRIBE FUNCTION trunc;
+DESCRIBE FUNCTION EXTENDED trunc;
+
+CREATE TABLE trunc_number(c DOUBLE) STORED AS ORC;
+INSERT INTO TABLE trunc_number VALUES (12345.54321);
+INSERT INTO TABLE trunc_number VALUES (12345);
+INSERT INTO TABLE trunc_number VALUES (0.54321);
+INSERT INTO TABLE trunc_number VALUES (NULL);
+
+-- trunc date from timestamp
+explain vectorization detail select trunc(ctimestamp1, 'MM') from alltypesorc;
+
+select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(ctimestamp1, 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(ctimestamp1, 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+
+-- trunc date from string
+explain vectorization detail select trunc(CAST(ctimestamp1 AS STRING), 'MM') from alltypesorc;
+
+select trunc(CAST(ctimestamp1 AS STRING), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(CAST(ctimestamp1 AS STRING), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(CAST(ctimestamp1 AS STRING), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+
+-- trunc date from date
+explain vectorization detail select trunc(CAST(ctimestamp1 AS DATE), 'MM') from alltypesorc;
+
+select trunc(CAST(ctimestamp1 AS DATE), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(CAST(ctimestamp1 AS DATE), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(CAST(ctimestamp1 AS DATE), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+
+select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc WHERE ctimestamp1 IS NULL LIMIT 10;
+
+-- trunc double
+explain vectorization detail
+select c, trunc(c,0) from trunc_number order by c;
+select c, 0, trunc(c,0) from trunc_number order by c;
+select c, -1, trunc(c,-1) from trunc_number order by c;
+select c, 1, trunc(c,1) from trunc_number order by c;
+
+-- trunc float
+explain vectorization detail
+select c, trunc(CAST (c AS FLOAT), 0) from trunc_number order by c;
+select c, 0, trunc(CAST (c AS FLOAT), 0) from trunc_number order by c;
+select c, -1, trunc(CAST (c AS FLOAT), -1) from trunc_number order by c;
+select c, 1, trunc(CAST (c AS FLOAT), 1) from trunc_number order by c;
+
+-- trunc decimal
+explain vectorization detail
+select c, trunc(CAST (c AS DECIMAL(10,5)), 0) from trunc_number order by c;
+select c, 0, trunc(CAST (c AS DECIMAL(10,5)), 0) from trunc_number order by c;
+select c, -1, trunc(CAST (c AS DECIMAL(10,5)), -1) from trunc_number order by c;
+select c, 1, trunc(CAST (c AS DECIMAL(10,5)), 1) from trunc_number order by c;
+
+-- scale not defined -> 0 (float)
+explain vectorization detail
+select c, trunc(c) from trunc_number order by c;
+select c, trunc(c) from trunc_number order by c;
+
+-- scale not defined -> 0 (decimal)
+explain vectorization detail
+select c, trunc(CAST (c AS DECIMAL(10,5))) from trunc_number order by c;
+select c, trunc(CAST (c AS DECIMAL(10,5))) from trunc_number order by c;
+
+
+
+set hive.vectorized.execution.enabled=false;
+
+-- trunc date from timestamp
+select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(ctimestamp1, 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(ctimestamp1, 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+
+-- trunc date from string
+select trunc(CAST(ctimestamp1 AS STRING), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(CAST(ctimestamp1 AS STRING), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(CAST(ctimestamp1 AS STRING), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+
+-- trunc date from date
+select trunc(CAST(ctimestamp1 AS DATE), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(CAST(ctimestamp1 AS DATE), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+select trunc(CAST(ctimestamp1 AS DATE), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10;
+
+select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc WHERE ctimestamp1 IS NULL LIMIT 10;
+
+-- trunc double
+select c, 0, trunc(c,0) from trunc_number order by c;
+select c, -1, trunc(c,-1) from trunc_number order by c;
+select c, 1, trunc(c,1) from trunc_number order by c;
+
+-- trunc float
+select c, 0, trunc(CAST (c AS FLOAT), 0) from trunc_number order by c;
+select c, -1, trunc(CAST (c AS FLOAT), -1) from trunc_number order by c;
+select c, 1, trunc(CAST (c AS FLOAT), 1) from trunc_number order by c;
+
+-- trunc decimal
+select c, 0, trunc(CAST (c AS DECIMAL(10,5)), 0) from trunc_number order by c;
+select c, -1, trunc(CAST (c AS DECIMAL(10,5)), -1) from trunc_number order by c;
+select c, 1, trunc(CAST (c AS DECIMAL(10,5)), 1) from trunc_number order by c;
+
+-- scale not defined -> 0 (float)
+select c, trunc(c) from trunc_number order by c;
+
+-- scale not defined -> 0 (decimal)
+select c, trunc(CAST (c AS DECIMAL(10,5))) from trunc_number order by c;
+
+drop table trunc_number;
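The numeric half of the test boils down to a small set of expected values; the lines below restate them for quick reference (figures taken from the q.out that follows). trunc keeps D decimal places without rounding, a negative D zeroes out digits to the left of the decimal point, and omitting D behaves like D = 0.

    SELECT trunc(12345.54321, 1);    -- 12345.5
    SELECT trunc(12345.54321, 0);    -- 12345.0
    SELECT trunc(12345.54321, -1);   -- 12340.0
    SELECT trunc(12345.54321);       -- 12345.0 (scale omitted, treated as 0)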
diff --git a/ql/src/test/results/clientpositive/vector_udf_trunc.q.out b/ql/src/test/results/clientpositive/vector_udf_trunc.q.out
new file mode 100644
index 0000000..a6ce91f
--- /dev/null
+++ b/ql/src/test/results/clientpositive/vector_udf_trunc.q.out
@@ -0,0 +1,1343 @@
+PREHOOK: query: DESCRIBE FUNCTION trunc
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION trunc
+POSTHOOK: type: DESCFUNCTION
+trunc(date, fmt) / trunc(N,D) - Returns If input is date returns date with the time portion of the day truncated to the unit specified by the format model fmt. If you omit fmt, then date is truncated to the nearest day. It currently only supports 'MONTH'/'MON'/'MM', 'QUARTER'/'Q' and 'YEAR'/'YYYY'/'YY' as format.If input is a number group returns N truncated to D decimal places. If D is omitted, then N is truncated to 0 places.D can be negative to truncate (make zero) D digits left of the decimal point.
+PREHOOK: query: DESCRIBE FUNCTION EXTENDED trunc
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION EXTENDED trunc
+POSTHOOK: type: DESCFUNCTION
+trunc(date, fmt) / trunc(N,D) - Returns If input is date returns date with the time portion of the day truncated to the unit specified by the format model fmt. If you omit fmt, then date is truncated to the nearest day. It currently only supports 'MONTH'/'MON'/'MM', 'QUARTER'/'Q' and 'YEAR'/'YYYY'/'YY' as format.If input is a number group returns N truncated to D decimal places. If D is omitted, then N is truncated to 0 places.D can be negative to truncate (make zero) D digits left of the decimal point.
+date is a string in the format 'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'. The time part of date is ignored.
+Example:
+  > SELECT trunc('2009-02-12', 'MM');
+OK
+ '2009-02-01'
+ > SELECT trunc('2017-03-15', 'Q');
+OK
+ '2017-01-01'
+ > SELECT trunc('2015-10-27', 'YEAR');
+OK
+ '2015-01-01' > SELECT trunc(1234567891.1234567891,4);
+OK
+ 1234567891.1234
+ > SELECT trunc(1234567891.1234567891,-4);
+OK
+ 1234560000 > SELECT trunc(1234567891.1234567891,0);
+OK
+ 1234567891
+ > SELECT trunc(1234567891.1234567891);
+OK
+ 1234567891
+Function class:org.apache.hadoop.hive.ql.udf.generic.GenericUDFTrunc
+Function type:BUILTIN
+PREHOOK: query: CREATE TABLE trunc_number(c DOUBLE) STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@trunc_number
+POSTHOOK: query: CREATE TABLE trunc_number(c DOUBLE) STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@trunc_number
+PREHOOK: query: INSERT INTO TABLE trunc_number VALUES (12345.54321)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@trunc_number
+POSTHOOK: query: INSERT INTO TABLE trunc_number VALUES (12345.54321)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@trunc_number
+POSTHOOK: Lineage: trunc_number.c SCRIPT []
+PREHOOK: query: INSERT INTO TABLE trunc_number VALUES (12345)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@trunc_number
+POSTHOOK: query: INSERT INTO TABLE trunc_number VALUES (12345)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@trunc_number
+POSTHOOK: Lineage: trunc_number.c SCRIPT []
+PREHOOK: query: INSERT INTO TABLE trunc_number VALUES (0.54321)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@trunc_number
+POSTHOOK: query: INSERT INTO TABLE trunc_number VALUES (0.54321)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@trunc_number
+POSTHOOK: Lineage: trunc_number.c SCRIPT []
+PREHOOK: query: INSERT INTO TABLE trunc_number VALUES (NULL)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@trunc_number
+POSTHOOK: query: INSERT INTO TABLE trunc_number VALUES (NULL)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@trunc_number
+POSTHOOK: Lineage: trunc_number.c EXPRESSION []
+PREHOOK: query: explain vectorization detail select trunc(ctimestamp1, 'MM') from alltypesorc
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail select trunc(ctimestamp1, 'MM') from alltypesorc
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: alltypesorc
+            Statistics: Num rows: 12288 Data size: 491520 Basic stats: COMPLETE Column stats: COMPLETE
+            TableScan Vectorization:
+                native: true
+                vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+            Select Operator
+              expressions: trunc(ctimestamp1, 'MM') (type: string)
+              outputColumnNames: _col0
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [13]
+                  selectExpressions: TruncDateFromTimestamp(col 8, format MM) -> 13:string
+              Statistics: Num rows: 12288 Data size: 2260992 Basic stats: COMPLETE Column stats: COMPLETE
+              File Output Operator
+                compressed: false
+                File Sink Vectorization:
+                    className: VectorFileSinkOperator
+                    native: false
+                Statistics: Num rows: 12288 Data size: 2260992 Basic stats: COMPLETE Column stats: COMPLETE
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+          rowBatchContext:
+              dataColumnCount: 12
+              includeColumns: [8]
+              dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean
+              partitionColumnCount: 0
+              scratchColumnTypeNames: [string]
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:43.619
+1969-12-01	1969-12-31 15:59:43.627
+1969-12-01	1969-12-31 15:59:43.628
+1969-12-01	1969-12-31 15:59:43.631
+1969-12-01	1969-12-31 15:59:43.637
+1969-12-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(ctimestamp1, 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(ctimestamp1, 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:43.619
+1969-10-01	1969-12-31 15:59:43.627
+1969-10-01	1969-12-31 15:59:43.628
+1969-10-01	1969-12-31 15:59:43.631
+1969-10-01	1969-12-31 15:59:43.637
+1969-10-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(ctimestamp1, 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(ctimestamp1, 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:43.619
+1969-01-01	1969-12-31 15:59:43.627
+1969-01-01	1969-12-31 15:59:43.628
+1969-01-01	1969-12-31 15:59:43.631
+1969-01-01	1969-12-31 15:59:43.637
+1969-01-01	1969-12-31 15:59:43.64
+PREHOOK: query: explain vectorization detail select trunc(CAST(ctimestamp1 AS STRING), 'MM') from alltypesorc
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail select trunc(CAST(ctimestamp1 AS STRING), 'MM') from alltypesorc
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: alltypesorc
+            Statistics: Num rows: 12288 Data size: 491520 Basic stats: COMPLETE Column stats: COMPLETE
+            TableScan Vectorization:
+                native: true
+                vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+            Select Operator
+              expressions: trunc(CAST( ctimestamp1 AS STRING), 'MM') (type: string)
+              outputColumnNames: _col0
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [14]
+                  selectExpressions: TruncDateFromString(col 13, format MM)(children: CastTimestampToString(col 8:timestamp) -> 13:string) -> 14:string
+              Statistics: Num rows: 12288 Data size: 2260992 Basic stats: COMPLETE Column stats: COMPLETE
+              File Output Operator
+                compressed: false
+                File Sink Vectorization:
+                    className: VectorFileSinkOperator
+                    native: false
+                Statistics: Num rows: 12288 Data size: 2260992 Basic stats: COMPLETE Column stats: COMPLETE
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+          rowBatchContext:
+              dataColumnCount: 12
+              includeColumns: [8]
+              dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean
+              partitionColumnCount: 0
+              scratchColumnTypeNames: [string, string]
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:43.619
+1969-12-01	1969-12-31 15:59:43.627
+1969-12-01	1969-12-31 15:59:43.628
+1969-12-01	1969-12-31 15:59:43.631
+1969-12-01	1969-12-31 15:59:43.637
+1969-12-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:43.619
+1969-10-01	1969-12-31 15:59:43.627
+1969-10-01	1969-12-31 15:59:43.628
+1969-10-01	1969-12-31 15:59:43.631
+1969-10-01	1969-12-31 15:59:43.637
+1969-10-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:43.619
+1969-01-01	1969-12-31 15:59:43.627
+1969-01-01	1969-12-31 15:59:43.628
+1969-01-01	1969-12-31 15:59:43.631
+1969-01-01	1969-12-31 15:59:43.637
+1969-01-01	1969-12-31 15:59:43.64
+PREHOOK: query: explain vectorization detail select trunc(CAST(ctimestamp1 AS DATE), 'MM') from alltypesorc
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail select trunc(CAST(ctimestamp1 AS DATE), 'MM') from alltypesorc
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: alltypesorc
+            Statistics: Num rows: 12288 Data size: 491520 Basic stats: COMPLETE Column stats: COMPLETE
+            TableScan Vectorization:
+                native: true
+                vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+            Select Operator
+              expressions: trunc(CAST( ctimestamp1 AS DATE), 'MM') (type: string)
+              outputColumnNames: _col0
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [14]
+                  selectExpressions: TruncDateFromDate(col 13, format MM)(children: CastTimestampToDate(col 8:timestamp) -> 13:date) -> 14:string
+              Statistics: Num rows: 12288 Data size: 2260992 Basic stats: COMPLETE Column stats: COMPLETE
+              File Output Operator
+                compressed: false
+                File Sink Vectorization:
+                    className: VectorFileSinkOperator
+                    native: false
+                Statistics: Num rows: 12288 Data size: 2260992 Basic stats: COMPLETE Column stats: COMPLETE
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+          rowBatchContext:
+              dataColumnCount: 12
+              includeColumns: [8]
+              dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean
+              partitionColumnCount: 0
+              scratchColumnTypeNames: [bigint, string]
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:43.619
+1969-12-01	1969-12-31 15:59:43.627
+1969-12-01	1969-12-31 15:59:43.628
+1969-12-01	1969-12-31 15:59:43.631
+1969-12-01	1969-12-31 15:59:43.637
+1969-12-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:43.619
+1969-10-01	1969-12-31 15:59:43.627
+1969-10-01	1969-12-31 15:59:43.628
+1969-10-01	1969-12-31 15:59:43.631
+1969-10-01	1969-12-31 15:59:43.637
+1969-10-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:43.619
+1969-01-01	1969-12-31 15:59:43.627
+1969-01-01	1969-12-31 15:59:43.628
+1969-01-01	1969-12-31 15:59:43.631
+1969-01-01	1969-12-31 15:59:43.637
+1969-01-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc WHERE ctimestamp1 IS NULL LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc WHERE ctimestamp1 IS NULL LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+PREHOOK: query: explain vectorization detail
+select c, trunc(c,0) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail
+select c, trunc(c,0) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: trunc_number
+            Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE
+            TableScan Vectorization:
+                native: true
+                vectorizationSchemaColumns: [0:c:double, 1:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+            Select Operator
+              expressions: c (type: double), trunc(c, 0) (type: double)
+              outputColumnNames: _col0, _col1
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [0, 2]
+                  selectExpressions: TruncFloat(col 0, scale 0) -> 2:double
+              Statistics: Num rows: 4 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: double)
+                sort order: +
+                Reduce Sink Vectorization:
+                    className: VectorReduceSinkOperator
+                    native: false
+                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                    nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+                Statistics: Num rows: 4 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+                value expressions: _col1 (type: double)
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+          rowBatchContext:
+              dataColumnCount: 1
+              includeColumns: [0]
+              dataColumns: c:double
+              partitionColumnCount: 0
+              scratchColumnTypeNames: [double]
+      Reduce Vectorization:
+          enabled: false
+          enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true
+          enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: double)
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 4 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 4 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select c, 0, trunc(c,0) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 0, trunc(c,0) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0	0.0
+12345.0	0	12345.0
+12345.54321	0	12345.0
+NULL	0	NULL
+PREHOOK: query: select c, -1, trunc(c,-1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, -1, trunc(c,-1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	-1	0.0
+12345.0	-1	12340.0
+12345.54321	-1	12340.0
+NULL	-1	NULL
+PREHOOK: query: select c, 1, trunc(c,1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 1, trunc(c,1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	1	0.5
+12345.0	1	12345.0
+12345.54321	1	12345.5
+NULL	1	NULL
+PREHOOK: query: explain vectorization detail
+select c, trunc(CAST (c AS FLOAT), 0) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail
+select c, trunc(CAST (c AS FLOAT), 0) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: trunc_number
+            Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE
+            TableScan Vectorization:
+                native: true
+                vectorizationSchemaColumns: [0:c:double, 1:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+            Select Operator
+              expressions: c (type: double), trunc(UDFToFloat(c), 0) (type: float)
+              outputColumnNames: _col0, _col1
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [0, 2]
+                  selectExpressions: TruncFloat(col 0, scale 0)(children: col 0:double) -> 2:float
+              Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: double)
+                sort order: +
+                Reduce Sink Vectorization:
+                    className: VectorReduceSinkOperator
+                    native: false
+                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                    nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+                Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+                value expressions: _col1 (type: float)
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+          rowBatchContext:
+              dataColumnCount: 1
+              includeColumns: [0]
+              dataColumns: c:double
+              partitionColumnCount: 0
+              scratchColumnTypeNames: [double]
+      Reduce Vectorization:
+          enabled: false
+          enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true
+          enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: float)
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select c, 0, trunc(CAST (c AS FLOAT), 0) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 0, trunc(CAST (c AS FLOAT), 0) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0	0.0
+12345.0	0	12345.0
+12345.54321	0	12345.0
+NULL	0	NULL
+PREHOOK: query: select c, -1, trunc(CAST (c AS FLOAT), -1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, -1, trunc(CAST (c AS FLOAT), -1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	-1	0.0
+12345.0	-1	12340.0
+12345.54321	-1	12340.0
+NULL	-1	NULL
+PREHOOK: query: select c, 1, trunc(CAST (c AS FLOAT), 1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 1, trunc(CAST (c AS FLOAT), 1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	1	0.5
+12345.0	1	12345.0
+12345.54321	1	12345.5
+NULL	1	NULL
+PREHOOK: query: explain vectorization detail
+select c, trunc(CAST (c AS DECIMAL(10,5)), 0) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail
+select c, trunc(CAST (c AS DECIMAL(10,5)), 0) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: trunc_number
+            Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE
+            TableScan Vectorization:
+                native: true
+                vectorizationSchemaColumns: [0:c:double, 1:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+            Select Operator
+              expressions: c (type: double), trunc(CAST( c AS decimal(10,5)), 0) (type: decimal(38,18))
+              outputColumnNames: _col0, _col1
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [0, 3]
+                  selectExpressions: TruncDecimal(col 2, scale 0)(children: CastDoubleToDecimal(col 0:double) -> 2:decimal(10,5)) -> 3:decimal(38,18)
+              Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: double)
+                sort order: +
+                Reduce Sink Vectorization:
+                    className: VectorReduceSinkOperator
+                    native: false
+                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                    nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+                Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: COMPLETE
+                value expressions: _col1 (type: decimal(38,18))
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+          rowBatchContext:
+              dataColumnCount: 1
+              includeColumns: [0]
+              dataColumns: c:double
+              partitionColumnCount: 0
+              scratchColumnTypeNames: [decimal(10,5), decimal(38,18)]
+      Reduce Vectorization:
+          enabled: false
+          enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true
+          enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: decimal(38,18))
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select c, 0, trunc(CAST (c AS DECIMAL(10,5)), 0) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 0, trunc(CAST (c AS DECIMAL(10,5)), 0) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0	0.000000000000000000
+12345.0	0	12345.000000000000000000
+12345.54321	0	12345.000000000000000000
+NULL	0	NULL
+PREHOOK: query: select c, -1, trunc(CAST (c AS DECIMAL(10,5)), -1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, -1, trunc(CAST (c AS DECIMAL(10,5)), -1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	-1	0.000000000000000000
+12345.0	-1	12340.000000000000000000
+12345.54321	-1	12340.000000000000000000
+NULL	-1	NULL
+PREHOOK: query: select c, 1, trunc(CAST (c AS DECIMAL(10,5)), 1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 1, trunc(CAST (c AS DECIMAL(10,5)), 1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	1	0.500000000000000000
+12345.0	1	12345.000000000000000000
+12345.54321	1	12345.500000000000000000
+NULL	1	NULL
+PREHOOK: query: explain vectorization detail
+select c, trunc(c) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail
+select c, trunc(c) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: trunc_number
+            Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE
+            TableScan Vectorization:
+                native: true
+                vectorizationSchemaColumns: [0:c:double, 1:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+            Select Operator
+              expressions: c (type: double), trunc(c) (type: double)
+              outputColumnNames: _col0, _col1
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [0, 2]
+                  selectExpressions: TruncFloatNoScale(col 0, scale 0) -> 2:double
+              Statistics: Num rows: 4 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: double)
+                sort order: +
+                Reduce Sink Vectorization:
+                    className: VectorReduceSinkOperator
+                    native: false
+                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                    nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+                Statistics: Num rows: 4 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+                value expressions: _col1 (type: double)
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+          rowBatchContext:
+              dataColumnCount: 1
+              includeColumns: [0]
+              dataColumns: c:double
+              partitionColumnCount: 0
+              scratchColumnTypeNames: [double]
+      Reduce Vectorization:
+          enabled: false
+          enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true
+          enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: double)
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 4 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 4 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select c, trunc(c) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, trunc(c) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0.0
+12345.0	12345.0
+12345.54321	12345.0
+NULL	NULL
+PREHOOK: query: explain vectorization detail
+select c, trunc(CAST (c AS DECIMAL(10,5))) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail
+select c, trunc(CAST (c AS DECIMAL(10,5))) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: trunc_number
+            Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE
+            TableScan Vectorization:
+                native: true
+                vectorizationSchemaColumns: [0:c:double, 1:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+            Select Operator
+              expressions: c (type: double), trunc(CAST( c AS decimal(10,5))) (type: decimal(38,18))
+              outputColumnNames: _col0, _col1
+              Select Vectorization:
+                  className: VectorSelectOperator
+                  native: true
+                  projectedOutputColumnNums: [0, 3]
+                  selectExpressions: TruncDecimalNoScale(col 2, scale 0)(children: CastDoubleToDecimal(col 0:double) -> 2:decimal(10,5)) -> 3:decimal(38,18)
+              Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col0 (type: double)
+                sort order: +
+                Reduce Sink Vectorization:
+                    className: VectorReduceSinkOperator
+                    native: false
+                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                    nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+                Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: COMPLETE
+                value expressions: _col1 (type: decimal(38,18))
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+          rowBatchContext:
+              dataColumnCount: 1
+              includeColumns: [0]
+              dataColumns: c:double
+              partitionColumnCount: 0
+              scratchColumnTypeNames: [decimal(10,5), decimal(38,18)]
+      Reduce Vectorization:
+          enabled: false
+          enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true
+          enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: decimal(38,18))
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 4 Data size: 480 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select c, trunc(CAST (c AS DECIMAL(10,5))) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, trunc(CAST (c AS DECIMAL(10,5))) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0.000000000000000000
+12345.0	12345.000000000000000000
+12345.54321	12345.000000000000000000
+NULL	NULL
+PREHOOK: query: select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:43.619
+1969-12-01	1969-12-31 15:59:43.627
+1969-12-01	1969-12-31 15:59:43.628
+1969-12-01	1969-12-31 15:59:43.631
+1969-12-01	1969-12-31 15:59:43.637
+1969-12-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(ctimestamp1, 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(ctimestamp1, 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:43.619
+1969-10-01	1969-12-31 15:59:43.627
+1969-10-01	1969-12-31 15:59:43.628
+1969-10-01	1969-12-31 15:59:43.631
+1969-10-01	1969-12-31 15:59:43.637
+1969-10-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(ctimestamp1, 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(ctimestamp1, 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:43.619
+1969-01-01	1969-12-31 15:59:43.627
+1969-01-01	1969-12-31 15:59:43.628
+1969-01-01	1969-12-31 15:59:43.631
+1969-01-01	1969-12-31 15:59:43.637
+1969-01-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:43.619
+1969-12-01	1969-12-31 15:59:43.627
+1969-12-01	1969-12-31 15:59:43.628
+1969-12-01	1969-12-31 15:59:43.631
+1969-12-01	1969-12-31 15:59:43.637
+1969-12-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:43.619
+1969-10-01	1969-12-31 15:59:43.627
+1969-10-01	1969-12-31 15:59:43.628
+1969-10-01	1969-12-31 15:59:43.631
+1969-10-01	1969-12-31 15:59:43.637
+1969-10-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS STRING), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:43.619
+1969-01-01	1969-12-31 15:59:43.627
+1969-01-01	1969-12-31 15:59:43.628
+1969-01-01	1969-12-31 15:59:43.631
+1969-01-01	1969-12-31 15:59:43.637
+1969-01-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'MM'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:30.929
+1969-12-01	1969-12-31 15:59:43.619
+1969-12-01	1969-12-31 15:59:43.627
+1969-12-01	1969-12-31 15:59:43.628
+1969-12-01	1969-12-31 15:59:43.631
+1969-12-01	1969-12-31 15:59:43.637
+1969-12-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'Q'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:30.929
+1969-10-01	1969-12-31 15:59:43.619
+1969-10-01	1969-12-31 15:59:43.627
+1969-10-01	1969-12-31 15:59:43.628
+1969-10-01	1969-12-31 15:59:43.631
+1969-10-01	1969-12-31 15:59:43.637
+1969-10-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(CAST(ctimestamp1 AS DATE), 'YEAR'), ctimestamp1 from alltypesorc order by ctimestamp1 LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:30.929
+1969-01-01	1969-12-31 15:59:43.619
+1969-01-01	1969-12-31 15:59:43.627
+1969-01-01	1969-12-31 15:59:43.628
+1969-01-01	1969-12-31 15:59:43.631
+1969-01-01	1969-12-31 15:59:43.637
+1969-01-01	1969-12-31 15:59:43.64
+PREHOOK: query: select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc WHERE ctimestamp1 IS NULL LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc(ctimestamp1, 'MM'), ctimestamp1 from alltypesorc WHERE ctimestamp1 IS NULL LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+NULL	NULL
+PREHOOK: query: select c, 0, trunc(c,0) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 0, trunc(c,0) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0	0.0
+12345.0	0	12345.0
+12345.54321	0	12345.0
+NULL	0	NULL
+PREHOOK: query: select c, -1, trunc(c,-1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, -1, trunc(c,-1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	-1	0.0
+12345.0	-1	12340.0
+12345.54321	-1	12340.0
+NULL	-1	NULL
+PREHOOK: query: select c, 1, trunc(c,1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 1, trunc(c,1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	1	0.5
+12345.0	1	12345.0
+12345.54321	1	12345.5
+NULL	1	NULL
+PREHOOK: query: select c, 0, trunc(CAST (c AS FLOAT), 0) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 0, trunc(CAST (c AS FLOAT), 0) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0	0.0
+12345.0	0	12345.0
+12345.54321	0	12345.0
+NULL	0	NULL
+PREHOOK: query: select c, -1, trunc(CAST (c AS FLOAT), -1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, -1, trunc(CAST (c AS FLOAT), -1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	-1	0.0
+12345.0	-1	12340.0
+12345.54321	-1	12340.0
+NULL	-1	NULL
+PREHOOK: query: select c, 1, trunc(CAST (c AS FLOAT), 1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 1, trunc(CAST (c AS FLOAT), 1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	1	0.5
+12345.0	1	12345.0
+12345.54321	1	12345.5
+NULL	1	NULL
+PREHOOK: query: select c, 0, trunc(CAST (c AS DECIMAL(10,5)), 0) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 0, trunc(CAST (c AS DECIMAL(10,5)), 0) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0	0.000000000000000000
+12345.0	0	12345.000000000000000000
+12345.54321	0	12345.000000000000000000
+NULL	0	NULL
+PREHOOK: query: select c, -1, trunc(CAST (c AS DECIMAL(10,5)), -1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, -1, trunc(CAST (c AS DECIMAL(10,5)), -1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	-1	0.000000000000000000
+12345.0	-1	12340.000000000000000000
+12345.54321	-1	12340.000000000000000000
+NULL	-1	NULL
+PREHOOK: query: select c, 1, trunc(CAST (c AS DECIMAL(10,5)), 1) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, 1, trunc(CAST (c AS DECIMAL(10,5)), 1) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	1	0.500000000000000000
+12345.0	1	12345.000000000000000000
+12345.54321	1	12345.500000000000000000
+NULL	1	NULL
+PREHOOK: query: select c, trunc(c) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, trunc(c) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0.0
+12345.0	12345.0
+12345.54321	12345.0
+NULL	NULL
+PREHOOK: query: select c, trunc(CAST (c AS DECIMAL(10,5))) from trunc_number order by c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+POSTHOOK: query: select c, trunc(CAST (c AS DECIMAL(10,5))) from trunc_number order by c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@trunc_number
+#### A masked pattern was here ####
+0.54321	0.000000000000000000
+12345.0	12345.000000000000000000
+12345.54321	12345.000000000000000000
+NULL	NULL
+PREHOOK: query: drop table trunc_number
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@trunc_number
+PREHOOK: Output: default@trunc_number
+POSTHOOK: query: drop table trunc_number
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@trunc_number
+POSTHOOK: Output: default@trunc_number