You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by ha...@apache.org on 2016/11/30 22:28:56 UTC

hive git commit: HIVE-14582 : Add trunc(numeric) udf (Chinna Rao Llam via Ashutosh Chauhan)

Repository: hive
Updated Branches:
  refs/heads/master 7a30ac4ab -> 76b311f59


HIVE-14582 : Add trunc(numeric) udf (Chinna Rao Llam via Ashutosh Chauhan)

Signed-off-by: Ashutosh Chauhan <ha...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/76b311f5
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/76b311f5
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/76b311f5

Branch: refs/heads/master
Commit: 76b311f59a87bc2350a6dd06f1ef2b0700d1b163
Parents: 7a30ac4
Author: Chinna Rao L <ch...@apache.org>
Authored: Thu Sep 22 03:49:00 2016 -0800
Committer: Ashutosh Chauhan <ha...@apache.org>
Committed: Wed Nov 30 14:27:49 2016 -0800

----------------------------------------------------------------------
 data/files/trunc_number.txt                     |   4 +
 data/files/trunc_number1.txt                    |   4 +
 .../hive/ql/udf/generic/GenericUDFTrunc.java    | 323 +++++++++++++++--
 .../queries/clientnegative/udf_trunc_error3.q   |   1 +
 .../queries/clientpositive/udf_trunc_number.q   |  72 ++++
 .../clientnegative/udf_trunc_error1.q.out       |   2 +-
 .../clientnegative/udf_trunc_error2.q.out       |   2 +-
 .../clientnegative/udf_trunc_error3.q.out       |   1 +
 .../test/results/clientpositive/udf_trunc.q.out |  16 +-
 .../clientpositive/udf_trunc_number.q.out       | 350 +++++++++++++++++++
 10 files changed, 746 insertions(+), 29 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/76b311f5/data/files/trunc_number.txt
----------------------------------------------------------------------
diff --git a/data/files/trunc_number.txt b/data/files/trunc_number.txt
new file mode 100644
index 0000000..c4d474d
--- /dev/null
+++ b/data/files/trunc_number.txt
@@ -0,0 +1,4 @@
+12345.1234,3
+12345.1234,-4
+12345.1234,0
+12345.1234
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/76b311f5/data/files/trunc_number1.txt
----------------------------------------------------------------------
diff --git a/data/files/trunc_number1.txt b/data/files/trunc_number1.txt
new file mode 100644
index 0000000..5b2c46c
--- /dev/null
+++ b/data/files/trunc_number1.txt
@@ -0,0 +1,4 @@
+12345,3
+12345,-4
+12345,0
+12345
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/76b311f5/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
index e20ad65..036d112 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
@@ -19,47 +19,61 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import java.math.BigDecimal;
+import java.math.RoundingMode;
 import java.sql.Timestamp;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
 import java.util.Date;
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 
 /**
  * GenericUDFTrunc.
  *
- * Returns the first day of the month which the date belongs to.
- * The time part of the date will be  ignored.
+ * Returns the first day of the month which the date belongs to. The time part of the date will be
+ * ignored.
  *
  */
-@Description(name = "trunc",
-value = "_FUNC_(date, fmt) - Returns returns date with the time portion of the day truncated "
+@Description(name = "trunc", value = "_FUNC_(date, fmt) / _FUNC_(N,D) - Returns If input is date returns date with the time portion of the day truncated "
     + "to the unit specified by the format model fmt. If you omit fmt, then date is truncated to "
-    + "the nearest day. It now only supports 'MONTH'/'MON'/'MM' and 'YEAR'/'YYYY'/'YY' as format.",
-extended = "date is a string in the format 'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'."
-    + " The time part of date is ignored.\n"
-    + "Example:\n "
-    + " > SELECT _FUNC_('2009-02-12', 'MM');\n" + "OK\n" + " '2009-02-01'" + "\n"
-    + " > SELECT _FUNC_('2015-10-27', 'YEAR');\n" + "OK\n" + " '2015-01-01'")
+    + "the nearest day. It now only supports 'MONTH'/'MON'/'MM' and 'YEAR'/'YYYY'/'YY' as format."
+    + "If input is a number group returns N truncated to D decimal places. If D is omitted, then N is truncated to 0 places."
+    + "D can be negative to truncate (make zero) D digits left of the decimal point."
+    , extended = "date is a string in the format 'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'."
+        + " The time part of date is ignored.\n" + "Example:\n "
+        + " > SELECT _FUNC_('2009-02-12', 'MM');\n" + "OK\n" + " '2009-02-01'" + "\n"
+        + " > SELECT _FUNC_('2015-10-27', 'YEAR');\n" + "OK\n" + " '2015-01-01'"
+        + " > SELECT _FUNC_(1234567891.1234567891,4);\n" + "OK\n" + " 1234567891.1234" + "\n"
+        + " > SELECT _FUNC_(1234567891.1234567891,-4);\n" + "OK\n" + " 1234560000"
+        + " > SELECT _FUNC_(1234567891.1234567891,0);\n" + "OK\n" + " 1234567891" + "\n"
+        + " > SELECT _FUNC_(1234567891.1234567891);\n" + "OK\n" + " 1234567891")
 public class GenericUDFTrunc extends GenericUDF {
 
   private transient SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
@@ -67,14 +81,138 @@ public class GenericUDFTrunc extends GenericUDF {
   private transient Converter textConverter1;
   private transient Converter textConverter2;
   private transient Converter dateWritableConverter;
+  private transient Converter byteConverter;
+  private transient Converter shortConverter;
+  private transient Converter intConverter;
+  private transient Converter longConverter;
   private transient PrimitiveCategory inputType1;
   private transient PrimitiveCategory inputType2;
   private final Calendar calendar = Calendar.getInstance();
   private final Text output = new Text();
   private transient String fmtInput;
+  private transient PrimitiveObjectInspector inputOI;
+  private transient PrimitiveObjectInspector inputScaleOI;
+  private int scale = 0;
+  private boolean inputSacleConst;
+  private boolean dateTypeArg;
 
   @Override
   public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    // trunc() is overloaded: trunc(date, fmt) for date/string inputs and trunc(N[, D]) for
+    // numeric inputs. Decide which mode applies from the argument types, remember the choice
+    // in dateTypeArg, and delegate to the matching initializer.
+    if (arguments.length == 2) {
+      inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
+      inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
+      PrimitiveGrouping group1 = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType1);
+      PrimitiveGrouping group2 = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2);
+      if ((group1 == PrimitiveGrouping.DATE_GROUP || group1 == PrimitiveGrouping.STRING_GROUP)
+          && group2 == PrimitiveGrouping.STRING_GROUP) {
+        dateTypeArg = true;
+        return initializeDate(arguments);
+      } else if (group1 == PrimitiveGrouping.NUMERIC_GROUP
+          && group2 == PrimitiveGrouping.NUMERIC_GROUP) {
+        dateTypeArg = false;
+        return initializeNumber(arguments);
+      }
+      throw new UDFArgumentException("Got wrong argument types : first argument type : "
+          + arguments[0].getTypeName() + ", second argument type : " + arguments[1].getTypeName());
+    } else if (arguments.length == 1) {
+      // Single-argument form is numeric-only: trunc(N) == trunc(N, 0).
+      inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
+      if (PrimitiveObjectInspectorUtils
+          .getPrimitiveGrouping(inputType1) == PrimitiveGrouping.NUMERIC_GROUP) {
+        dateTypeArg = false;
+        return initializeNumber(arguments);
+      } else {
+        // BUG FIX: this path previously reported arguments[1].getTypeName(), but only one
+        // argument exists here, so it threw ArrayIndexOutOfBoundsException instead of a
+        // meaningful error. Report the offending first argument instead.
+        throw new UDFArgumentException(
+            "Only numeric type arguments are accepted for single-argument trunc(), got "
+                + arguments[0].getTypeName());
+      }
+    }
+    throw new UDFArgumentException("TRUNC requires one or two argument, got " + arguments.length);
+  }
+
+  /**
+   * Sets up the numeric form trunc(N [, D]).
+   *
+   * Validates that both arguments are primitives, resolves the optional scale argument D
+   * (parsed once here when it is a constant, otherwise a per-row converter is prepared for
+   * evaluateNumber()), and returns a writable object inspector matching the input's
+   * primitive category, so the output type mirrors the input type.
+   *
+   * @throws UDFArgumentException if the constant scale is not an integer value
+   */
+  private ObjectInspector initializeNumber(ObjectInspector[] arguments)
+      throws UDFArgumentException {
+    if (arguments.length < 1 || arguments.length > 2) {
+      throw new UDFArgumentLengthException(
+          "TRUNC requires one or two argument, got " + arguments.length);
+    }
+
+    if (arguments[0].getCategory() != Category.PRIMITIVE) {
+      throw new UDFArgumentTypeException(0,
+          "TRUNC input only takes primitive types, got " + arguments[0].getTypeName());
+    }
+    inputOI = (PrimitiveObjectInspector) arguments[0];
+
+    if (arguments.length == 2) {
+      if (arguments[1].getCategory() != Category.PRIMITIVE) {
+        throw new UDFArgumentTypeException(1,
+            "TRUNC second argument only takes primitive types, got " + arguments[1].getTypeName());
+      }
+
+      inputScaleOI = (PrimitiveObjectInspector) arguments[1];
+      inputSacleConst = arguments[1] instanceof ConstantObjectInspector;
+      if (inputSacleConst) {
+        // Constant scale: fold it once at compile time. A non-integer constant
+        // (e.g. trunc(1.0, 12.123)) is rejected here.
+        try {
+          Object obj = ((ConstantObjectInspector) arguments[1]).getWritableConstantValue();
+          fmtInput = obj != null ? obj.toString() : null;
+          scale = Integer.parseInt(fmtInput);
+        } catch (Exception e) {
+          throw new UDFArgumentException("TRUNC input only takes integer values, got " + fmtInput);
+        }
+      } else {
+        // Non-constant scale: prepare a converter so evaluateNumber() can read it per row.
+        switch (inputScaleOI.getPrimitiveCategory()) {
+        case BYTE:
+          byteConverter = ObjectInspectorConverters.getConverter(arguments[1],
+              PrimitiveObjectInspectorFactory.writableByteObjectInspector);
+          break;
+        case SHORT:
+          shortConverter = ObjectInspectorConverters.getConverter(arguments[1],
+              PrimitiveObjectInspectorFactory.writableShortObjectInspector);
+          break;
+        case INT:
+          intConverter = ObjectInspectorConverters.getConverter(arguments[1],
+              PrimitiveObjectInspectorFactory.writableIntObjectInspector);
+          break;
+        case LONG:
+          longConverter = ObjectInspectorConverters.getConverter(arguments[1],
+              PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+          break;
+        default:
+          throw new UDFArgumentTypeException(1,
+              getFuncName().toUpperCase() + " second argument only takes integer values");
+        }
+      }
+    }
+
+    inputType1 = inputOI.getPrimitiveCategory();
+    // The output inspector simply mirrors the input's primitive category. The original
+    // switch had two branches (DECIMAL vs. the integral/floating cases) with identical
+    // bodies; they are merged here.
+    switch (inputType1) {
+    case VOID:
+    case BYTE:
+    case SHORT:
+    case INT:
+    case LONG:
+    case FLOAT:
+    case DOUBLE:
+    case DECIMAL:
+      return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputType1);
+    default:
+      throw new UDFArgumentTypeException(0,
+          "Only numeric or string group data types are allowed for TRUNC function. Got "
+              + inputType1.name());
+    }
+  }
+
+  private ObjectInspector initializeDate(ObjectInspector[] arguments)
+      throws UDFArgumentLengthException, UDFArgumentTypeException {
     if (arguments.length != 2) {
       throw new UDFArgumentLengthException("trunc() requires 2 argument, got " + arguments.length);
     }
@@ -97,8 +235,7 @@ public class GenericUDFTrunc extends GenericUDF {
     case CHAR:
     case VOID:
       inputType1 = PrimitiveCategory.STRING;
-      textConverter1 = ObjectInspectorConverters.getConverter(
-          arguments[0],
+      textConverter1 = ObjectInspectorConverters.getConverter(arguments[0],
           PrimitiveObjectInspectorFactory.writableStringObjectInspector);
       break;
     case TIMESTAMP:
@@ -106,8 +243,7 @@ public class GenericUDFTrunc extends GenericUDF {
           PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
       break;
     case DATE:
-      dateWritableConverter = ObjectInspectorConverters.getConverter(
-          arguments[0],
+      dateWritableConverter = ObjectInspectorConverters.getConverter(arguments[0],
           PrimitiveObjectInspectorFactory.writableDateObjectInspector);
       break;
     default:
@@ -117,9 +253,10 @@ public class GenericUDFTrunc extends GenericUDF {
     }
 
     inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
-    if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2)
-        != PrimitiveGrouping.STRING_GROUP && PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2)
-        != PrimitiveGrouping.VOID_GROUP) {
+    if (PrimitiveObjectInspectorUtils
+        .getPrimitiveGrouping(inputType2) != PrimitiveGrouping.STRING_GROUP
+        && PrimitiveObjectInspectorUtils
+            .getPrimitiveGrouping(inputType2) != PrimitiveGrouping.VOID_GROUP) {
       throw new UDFArgumentTypeException(1,
           "trunk() only takes STRING/CHAR/VARCHAR types as second argument, got " + inputType2);
     }
@@ -130,16 +267,23 @@ public class GenericUDFTrunc extends GenericUDF {
       Object obj = ((ConstantObjectInspector) arguments[1]).getWritableConstantValue();
       fmtInput = obj != null ? obj.toString() : null;
     } else {
-      textConverter2 = ObjectInspectorConverters.getConverter(
-          arguments[1],
+      textConverter2 = ObjectInspectorConverters.getConverter(arguments[1],
           PrimitiveObjectInspectorFactory.writableStringObjectInspector);
     }
-
     return outputOI;
   }
 
   @Override
   public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    // Route to the date or numeric implementation according to the mode that
+    // initialize() selected from the argument types.
+    if (dateTypeArg) {
+      return evaluateDate(arguments);
+    } else {
+      return evaluateNumber(arguments);
+    }
+  }
+
+  private Object evaluateDate(DeferredObject[] arguments) throws UDFArgumentLengthException,
+      HiveException, UDFArgumentTypeException, UDFArgumentException {
     if (arguments.length != 2) {
       throw new UDFArgumentLengthException("trunc() requires 2 argument, got " + arguments.length);
     }
@@ -163,8 +307,8 @@ public class GenericUDFTrunc extends GenericUDF {
       }
       break;
     case TIMESTAMP:
-      Timestamp ts = ((TimestampWritable) timestampConverter.convert(arguments[0].get()))
-          .getTimestamp();
+      Timestamp ts =
+          ((TimestampWritable) timestampConverter.convert(arguments[0].get())).getTimestamp();
       date = ts;
       break;
     case DATE:
@@ -185,6 +329,98 @@ public class GenericUDFTrunc extends GenericUDF {
     return output;
   }
 
+  /**
+   * Evaluates the numeric form trunc(N [, D]).
+   *
+   * Resolves the per-row scale when it is not a constant, then truncates the first
+   * argument to {@code scale} decimal places, preserving the input's writable type
+   * (a negative scale zeroes out digits left of the decimal point). Returns null for
+   * null or VOID input.
+   */
+  private Object evaluateNumber(DeferredObject[] arguments)
+      throws HiveException, UDFArgumentTypeException {
+
+    if (arguments[0] == null) {
+      return null;
+    }
+
+    Object input = arguments[0].get();
+    if (input == null) {
+      return null;
+    }
+
+    // Non-constant scale: convert the second argument for this row.
+    if (arguments.length == 2 && arguments[1] != null && arguments[1].get() != null
+        && !inputSacleConst) {
+      Object scaleObj = null;
+      switch (inputScaleOI.getPrimitiveCategory()) {
+      case BYTE:
+        scaleObj = byteConverter.convert(arguments[1].get());
+        scale = ((ByteWritable) scaleObj).get();
+        break;
+      case SHORT:
+        scaleObj = shortConverter.convert(arguments[1].get());
+        scale = ((ShortWritable) scaleObj).get();
+        break;
+      case INT:
+        scaleObj = intConverter.convert(arguments[1].get());
+        scale = ((IntWritable) scaleObj).get();
+        break;
+      case LONG:
+        scaleObj = longConverter.convert(arguments[1].get());
+        long longScale = ((LongWritable) scaleObj).get();
+        // A BIGINT scale wider than int cannot be a meaningful digit count.
+        if (longScale < Integer.MIN_VALUE || longScale > Integer.MAX_VALUE) {
+          throw new UDFArgumentException(
+              getFuncName().toUpperCase() + " scale argument out of allowed range");
+        }
+        scale = (int) longScale;
+        break; // BUG FIX: break was missing; control fell through into default.
+      default:
+        break;
+      }
+    }
+
+    switch (inputType1) {
+    case VOID:
+      return null;
+    case DECIMAL:
+      HiveDecimalWritable decimalWritable =
+          (HiveDecimalWritable) inputOI.getPrimitiveWritableObject(input);
+      HiveDecimal dec = trunc(decimalWritable.getHiveDecimal(), scale);
+      if (dec == null) {
+        return null;
+      }
+      return new HiveDecimalWritable(dec);
+    case BYTE:
+      // Integral inputs are unchanged by any non-negative scale.
+      ByteWritable byteWritable = (ByteWritable) inputOI.getPrimitiveWritableObject(input);
+      if (scale >= 0) {
+        return byteWritable;
+      } else {
+        return new ByteWritable((byte) trunc(byteWritable.get(), scale));
+      }
+    case SHORT:
+      ShortWritable shortWritable = (ShortWritable) inputOI.getPrimitiveWritableObject(input);
+      if (scale >= 0) {
+        return shortWritable;
+      } else {
+        return new ShortWritable((short) trunc(shortWritable.get(), scale));
+      }
+    case INT:
+      IntWritable intWritable = (IntWritable) inputOI.getPrimitiveWritableObject(input);
+      if (scale >= 0) {
+        return intWritable;
+      } else {
+        return new IntWritable((int) trunc(intWritable.get(), scale));
+      }
+    case LONG:
+      LongWritable longWritable = (LongWritable) inputOI.getPrimitiveWritableObject(input);
+      if (scale >= 0) {
+        return longWritable;
+      } else {
+        return new LongWritable(trunc(longWritable.get(), scale));
+      }
+    case FLOAT:
+      float f = ((FloatWritable) inputOI.getPrimitiveWritableObject(input)).get();
+      return new FloatWritable((float) trunc(f, scale));
+    case DOUBLE:
+      return trunc(((DoubleWritable) inputOI.getPrimitiveWritableObject(input)), scale);
+    default:
+      throw new UDFArgumentTypeException(0,
+          "Only numeric or string group data types are allowed for TRUNC function. Got "
+              + inputType1.name());
+    }
+  }
+
   @Override
   public String getDisplayString(String[] children) {
     return getStandardDisplayString("trunc", children);
@@ -203,4 +439,43 @@ public class GenericUDFTrunc extends GenericUDF {
       return null;
     }
   }
+
+  /**
+   * Truncates a HiveDecimal to {@code scale} decimal places by delegating to the
+   * BigDecimal overload and re-wrapping the result.
+   */
+  protected HiveDecimal trunc(HiveDecimal input, int scale) {
+    return HiveDecimal.create(trunc(input.bigDecimalValue(), scale));
+  }
+
+  /**
+   * Truncates a long value; only meaningful for negative scales, which zero out the
+   * low |scale| digits (e.g. trunc(12345, -4) == 10000).
+   */
+  protected long trunc(long input, int scale) {
+    BigDecimal truncated = trunc(BigDecimal.valueOf(input), scale);
+    return truncated.longValue();
+  }
+
+  /**
+   * Truncates a double to {@code scale} decimal places via the BigDecimal overload.
+   */
+  protected double trunc(double input, int scale) {
+    BigDecimal truncated = trunc(BigDecimal.valueOf(input), scale);
+    return truncated.doubleValue();
+  }
+
+  /**
+   * Truncates the value of a DoubleWritable to {@code scale} decimal places.
+   */
+  protected DoubleWritable trunc(DoubleWritable input, int scale) {
+    // CONSISTENCY FIX: use BigDecimal.valueOf(double) as trunc(double, int) does.
+    // new BigDecimal(double) expands the exact binary representation of the double
+    // (e.g. 0.1 -> 0.1000000000000000055511151231257827...), which can yield a
+    // surprising digit at the truncation boundary; valueOf uses the canonical
+    // shortest decimal form (Double.toString).
+    BigDecimal truncated = trunc(BigDecimal.valueOf(input.get()), scale);
+    return new DoubleWritable(truncated.doubleValue());
+  }
+
+  /**
+   * Truncates {@code input} to {@code scale} decimal places, rounding toward zero.
+   * A negative scale zeroes out |scale| digits to the left of the decimal point;
+   * scale 0 drops the fractional part entirely.
+   */
+  protected BigDecimal trunc(BigDecimal input, int scale) {
+    // BUG FIX: the previous implementation round-tripped through long
+    // (input.multiply(pow).longValue()), which silently overflows once the scaled
+    // magnitude exceeds Long.MAX_VALUE, and built the power of ten from double
+    // Math.pow(10, scale), which loses precision for large |scale|.
+    // BigDecimal.setScale with RoundingMode.DOWN performs the identical truncation
+    // exactly for all magnitudes (BigDecimal scales may be negative), with no
+    // intermediate narrowing.
+    return input.setScale(scale, RoundingMode.DOWN);
+  }
+  
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/76b311f5/ql/src/test/queries/clientnegative/udf_trunc_error3.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/udf_trunc_error3.q b/ql/src/test/queries/clientnegative/udf_trunc_error3.q
new file mode 100644
index 0000000..f9d1fff
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/udf_trunc_error3.q
@@ -0,0 +1 @@
+SELECT TRUNC(1.0, 12.123);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/76b311f5/ql/src/test/queries/clientpositive/udf_trunc_number.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/udf_trunc_number.q b/ql/src/test/queries/clientpositive/udf_trunc_number.q
new file mode 100644
index 0000000..b3fd9e5
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/udf_trunc_number.q
@@ -0,0 +1,72 @@
+set hive.fetch.task.conversion=more;
+
+EXPLAIN SELECT trunc(1234567891.1234567891,4), trunc(1234567891.1234567891,-4), trunc(1234567891.1234567891,0), trunc(1234567891.1234567891) FROM src tablesample (1 rows);
+
+SELECT trunc(1234567891.1234567891,4), trunc(1234567891.1234567891,-4), trunc(1234567891.1234567891,0), trunc(1234567891.1234567891) FROM src tablesample (1 rows);
+
+DROP TABLE sampletable;
+
+CREATE TABLE sampletable(c DOUBLE, d INT)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY ','
+STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH '../../data/files/trunc_number.txt' INTO TABLE sampletable;
+
+EXPLAIN select trunc (c,d) from sampletable;
+
+select trunc (c,d) from sampletable;
+
+CREATE TABLE sampletable1(c FLOAT, d SMALLINT)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY ','
+STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH '../../data/files/trunc_number.txt' INTO TABLE sampletable1;
+
+EXPLAIN select trunc (c,d) from sampletable1;
+
+select trunc (c,d) from sampletable1;
+
+CREATE TABLE sampletable2(c DOUBLE, d BIGINT)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY ','
+STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH '../../data/files/trunc_number.txt' INTO TABLE sampletable2;
+
+EXPLAIN select trunc (c,d) from sampletable2;
+
+select trunc (c,d) from sampletable2;
+
+CREATE TABLE sampletable3(c BIGINT, d SMALLINT)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY ','
+STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH '../../data/files/trunc_number1.txt' INTO TABLE sampletable3;
+
+EXPLAIN select trunc (c,d) from sampletable3;
+
+select trunc (c,d) from sampletable3;
+
+CREATE TABLE sampletable4(c INT, d INT)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY ','
+STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH '../../data/files/trunc_number1.txt' INTO TABLE sampletable4;
+
+EXPLAIN select trunc (c,d) from sampletable4;
+
+select trunc (c,d) from sampletable4;
+
+DROP TABLE sampletable;
+
+DROP TABLE sampletable1;
+
+DROP TABLE sampletable2;
+
+DROP TABLE sampletable3;
+
+DROP TABLE sampletable4;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/76b311f5/ql/src/test/results/clientnegative/udf_trunc_error1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/udf_trunc_error1.q.out b/ql/src/test/results/clientnegative/udf_trunc_error1.q.out
index 5d65b11..00de8f4 100644
--- a/ql/src/test/results/clientnegative/udf_trunc_error1.q.out
+++ b/ql/src/test/results/clientnegative/udf_trunc_error1.q.out
@@ -1 +1 @@
-FAILED: SemanticException [Error 10016]: Line 1:27 Argument type mismatch '1': trunk() only takes STRING/CHAR/VARCHAR types as second argument, got INT
+FAILED: SemanticException [Error 10014]: Line 1:7 Wrong arguments '1': Got wrong argument types : first argument type : string, second argument type : int

http://git-wip-us.apache.org/repos/asf/hive/blob/76b311f5/ql/src/test/results/clientnegative/udf_trunc_error2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/udf_trunc_error2.q.out b/ql/src/test/results/clientnegative/udf_trunc_error2.q.out
index 55a2185..01c7c52 100644
--- a/ql/src/test/results/clientnegative/udf_trunc_error2.q.out
+++ b/ql/src/test/results/clientnegative/udf_trunc_error2.q.out
@@ -1 +1 @@
-FAILED: SemanticException [Error 10016]: Line 1:13 Argument type mismatch '1.0': TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got DECIMAL
+FAILED: SemanticException [Error 10014]: Line 1:7 Wrong arguments ''MM'': Got wrong argument types : first argument type : decimal(1,0), second argument type : string

http://git-wip-us.apache.org/repos/asf/hive/blob/76b311f5/ql/src/test/results/clientnegative/udf_trunc_error3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/udf_trunc_error3.q.out b/ql/src/test/results/clientnegative/udf_trunc_error3.q.out
new file mode 100644
index 0000000..51a6f4e
--- /dev/null
+++ b/ql/src/test/results/clientnegative/udf_trunc_error3.q.out
@@ -0,0 +1 @@
+FAILED: SemanticException [Error 10014]: Line 1:7 Wrong arguments '12.123': TRUNC input only takes integer values, got 12.123

http://git-wip-us.apache.org/repos/asf/hive/blob/76b311f5/ql/src/test/results/clientpositive/udf_trunc.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf_trunc.q.out b/ql/src/test/results/clientpositive/udf_trunc.q.out
index 4c9f76d..034caab 100644
--- a/ql/src/test/results/clientpositive/udf_trunc.q.out
+++ b/ql/src/test/results/clientpositive/udf_trunc.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: DESCRIBE FUNCTION trunc
 PREHOOK: type: DESCFUNCTION
 POSTHOOK: query: DESCRIBE FUNCTION trunc
 POSTHOOK: type: DESCFUNCTION
-trunc(date, fmt) - Returns returns date with the time portion of the day truncated to the unit specified by the format model fmt. If you omit fmt, then date is truncated to the nearest day. It now only supports 'MONTH'/'MON'/'MM' and 'YEAR'/'YYYY'/'YY' as format.
+trunc(date, fmt) / trunc(N,D) - Returns If input is date returns date with the time portion of the day truncated to the unit specified by the format model fmt. If you omit fmt, then date is truncated to the nearest day. It now only supports 'MONTH'/'MON'/'MM' and 'YEAR'/'YYYY'/'YY' as format.If input is a number group returns N truncated to D decimal places. If D is omitted, then N is truncated to 0 places.D can be negative to truncate (make zero) D digits left of the decimal point.
 PREHOOK: query: DESCRIBE FUNCTION EXTENDED trunc
 PREHOOK: type: DESCFUNCTION
 POSTHOOK: query: DESCRIBE FUNCTION EXTENDED trunc
 POSTHOOK: type: DESCFUNCTION
-trunc(date, fmt) - Returns returns date with the time portion of the day truncated to the unit specified by the format model fmt. If you omit fmt, then date is truncated to the nearest day. It now only supports 'MONTH'/'MON'/'MM' and 'YEAR'/'YYYY'/'YY' as format.
+trunc(date, fmt) / trunc(N,D) - Returns If input is date returns date with the time portion of the day truncated to the unit specified by the format model fmt. If you omit fmt, then date is truncated to the nearest day. It now only supports 'MONTH'/'MON'/'MM' and 'YEAR'/'YYYY'/'YY' as format.If input is a number group returns N truncated to D decimal places. If D is omitted, then N is truncated to 0 places.D can be negative to truncate (make zero) D digits left of the decimal point.
 date is a string in the format 'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'. The time part of date is ignored.
 Example:
   > SELECT trunc('2009-02-12', 'MM');
@@ -15,7 +15,17 @@ OK
  '2009-02-01'
  > SELECT trunc('2015-10-27', 'YEAR');
 OK
- '2015-01-01'
+ '2015-01-01' > SELECT trunc(1234567891.1234567891,4);
+OK
+ 1234567891.1234
+ > SELECT trunc(1234567891.1234567891,-4);
+OK
+ 1234560000 > SELECT trunc(1234567891.1234567891,0);
+OK
+ 1234567891
+ > SELECT trunc(1234567891.1234567891);
+OK
+ 1234567891
 PREHOOK: query: --test string with 'MM' as format
 EXPLAIN
 SELECT

http://git-wip-us.apache.org/repos/asf/hive/blob/76b311f5/ql/src/test/results/clientpositive/udf_trunc_number.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf_trunc_number.q.out b/ql/src/test/results/clientpositive/udf_trunc_number.q.out
new file mode 100644
index 0000000..dfc9d70
--- /dev/null
+++ b/ql/src/test/results/clientpositive/udf_trunc_number.q.out
@@ -0,0 +1,350 @@
+PREHOOK: query: EXPLAIN SELECT trunc(1234567891.1234567891,4), trunc(1234567891.1234567891,-4), trunc(1234567891.1234567891,0), trunc(1234567891.1234567891) FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT trunc(1234567891.1234567891,4), trunc(1234567891.1234567891,-4), trunc(1234567891.1234567891,0), trunc(1234567891.1234567891) FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: src
+          Row Limit Per Split: 1
+          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: 1234567891.1234 (type: decimal(38,18)), 1234560000 (type: decimal(38,18)), 1234567891 (type: decimal(38,18)), 1234567891 (type: decimal(38,18))
+            outputColumnNames: _col0, _col1, _col2, _col3
+            Statistics: Num rows: 500 Data size: 224000 Basic stats: COMPLETE Column stats: COMPLETE
+            ListSink
+
+PREHOOK: query: SELECT trunc(1234567891.1234567891,4), trunc(1234567891.1234567891,-4), trunc(1234567891.1234567891,0), trunc(1234567891.1234567891) FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT trunc(1234567891.1234567891,4), trunc(1234567891.1234567891,-4), trunc(1234567891.1234567891,0), trunc(1234567891.1234567891) FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+1234567891.1234	1234560000	1234567891	1234567891
+PREHOOK: query: DROP TABLE sampletable
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE sampletable
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE sampletable(c DOUBLE, d INT)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY ','
+STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@sampletable
+POSTHOOK: query: CREATE TABLE sampletable(c DOUBLE, d INT)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY ','
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@sampletable
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/trunc_number.txt' INTO TABLE sampletable
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@sampletable
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/trunc_number.txt' INTO TABLE sampletable
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@sampletable
+PREHOOK: query: EXPLAIN select trunc (c,d) from sampletable
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN select trunc (c,d) from sampletable
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: sampletable
+          Statistics: Num rows: 4 Data size: 50 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: trunc(c, d) (type: double)
+            outputColumnNames: _col0
+            Statistics: Num rows: 4 Data size: 50 Basic stats: COMPLETE Column stats: NONE
+            ListSink
+
+PREHOOK: query: select trunc (c,d) from sampletable
+PREHOOK: type: QUERY
+PREHOOK: Input: default@sampletable
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc (c,d) from sampletable
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@sampletable
+#### A masked pattern was here ####
+12345.123
+10000.0
+12345.0
+12345.0
+PREHOOK: query: CREATE TABLE sampletable1(c FLOAT, d SMALLINT)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY ','
+STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@sampletable1
+POSTHOOK: query: CREATE TABLE sampletable1(c FLOAT, d SMALLINT)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY ','
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@sampletable1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/trunc_number.txt' INTO TABLE sampletable1
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@sampletable1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/trunc_number.txt' INTO TABLE sampletable1
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@sampletable1
+PREHOOK: query: EXPLAIN select trunc (c,d) from sampletable1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN select trunc (c,d) from sampletable1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: sampletable1
+          Statistics: Num rows: 6 Data size: 50 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: trunc(c, d) (type: float)
+            outputColumnNames: _col0
+            Statistics: Num rows: 6 Data size: 50 Basic stats: COMPLETE Column stats: NONE
+            ListSink
+
+PREHOOK: query: select trunc (c,d) from sampletable1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@sampletable1
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc (c,d) from sampletable1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@sampletable1
+#### A masked pattern was here ####
+12345.123
+10000.0
+12345.0
+12345.0
+PREHOOK: query: CREATE TABLE sampletable2(c DOUBLE, d BIGINT)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY ','
+STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@sampletable2
+POSTHOOK: query: CREATE TABLE sampletable2(c DOUBLE, d BIGINT)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY ','
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@sampletable2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/trunc_number.txt' INTO TABLE sampletable2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@sampletable2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/trunc_number.txt' INTO TABLE sampletable2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@sampletable2
+PREHOOK: query: EXPLAIN select trunc (c,d) from sampletable2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN select trunc (c,d) from sampletable2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: sampletable2
+          Statistics: Num rows: 3 Data size: 50 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: trunc(c, d) (type: double)
+            outputColumnNames: _col0
+            Statistics: Num rows: 3 Data size: 50 Basic stats: COMPLETE Column stats: NONE
+            ListSink
+
+PREHOOK: query: select trunc (c,d) from sampletable2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@sampletable2
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc (c,d) from sampletable2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@sampletable2
+#### A masked pattern was here ####
+12345.123
+10000.0
+12345.0
+12345.0
+PREHOOK: query: CREATE TABLE sampletable3(c BIGINT, d SMALLINT)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY ','
+STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@sampletable3
+POSTHOOK: query: CREATE TABLE sampletable3(c BIGINT, d SMALLINT)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY ','
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@sampletable3
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/trunc_number1.txt' INTO TABLE sampletable3
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@sampletable3
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/trunc_number1.txt' INTO TABLE sampletable3
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@sampletable3
+PREHOOK: query: EXPLAIN select trunc (c,d) from sampletable3
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN select trunc (c,d) from sampletable3
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: sampletable3
+          Statistics: Num rows: 2 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: trunc(c, d) (type: bigint)
+            outputColumnNames: _col0
+            Statistics: Num rows: 2 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+            ListSink
+
+PREHOOK: query: select trunc (c,d) from sampletable3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@sampletable3
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc (c,d) from sampletable3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@sampletable3
+#### A masked pattern was here ####
+12345
+10000
+12345
+12345
+PREHOOK: query: CREATE TABLE sampletable4(c INT, d INT)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY ','
+STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@sampletable4
+POSTHOOK: query: CREATE TABLE sampletable4(c INT, d INT)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY ','
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@sampletable4
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/trunc_number1.txt' INTO TABLE sampletable4
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@sampletable4
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/trunc_number1.txt' INTO TABLE sampletable4
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@sampletable4
+PREHOOK: query: EXPLAIN select trunc (c,d) from sampletable4
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN select trunc (c,d) from sampletable4
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: sampletable4
+          Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: trunc(c, d) (type: int)
+            outputColumnNames: _col0
+            Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+            ListSink
+
+PREHOOK: query: select trunc (c,d) from sampletable4
+PREHOOK: type: QUERY
+PREHOOK: Input: default@sampletable4
+#### A masked pattern was here ####
+POSTHOOK: query: select trunc (c,d) from sampletable4
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@sampletable4
+#### A masked pattern was here ####
+12345
+10000
+12345
+12345
+PREHOOK: query: DROP TABLE sampletable
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@sampletable
+PREHOOK: Output: default@sampletable
+POSTHOOK: query: DROP TABLE sampletable
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@sampletable
+POSTHOOK: Output: default@sampletable
+PREHOOK: query: DROP TABLE sampletable1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@sampletable1
+PREHOOK: Output: default@sampletable1
+POSTHOOK: query: DROP TABLE sampletable1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@sampletable1
+POSTHOOK: Output: default@sampletable1
+PREHOOK: query: DROP TABLE sampletable2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@sampletable2
+PREHOOK: Output: default@sampletable2
+POSTHOOK: query: DROP TABLE sampletable2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@sampletable2
+POSTHOOK: Output: default@sampletable2
+PREHOOK: query: DROP TABLE sampletable3
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@sampletable3
+PREHOOK: Output: default@sampletable3
+POSTHOOK: query: DROP TABLE sampletable3
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@sampletable3
+POSTHOOK: Output: default@sampletable3
+PREHOOK: query: DROP TABLE sampletable4
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@sampletable4
+PREHOOK: Output: default@sampletable4
+POSTHOOK: query: DROP TABLE sampletable4
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@sampletable4
+POSTHOOK: Output: default@sampletable4