Posted to commits@hive.apache.org by jc...@apache.org on 2018/06/28 15:45:14 UTC

[30/34] hive git commit: HIVE-20007: Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
index 857f300..f3ec709 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
@@ -66,7 +66,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFMin;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFSum;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
@@ -180,7 +180,7 @@ public class StatsOptimizer extends Transform {
 
     enum DateSubType {
       DAYS {@Override
-        Object cast(long longValue) { return (new DateWritable((int)longValue)).get();}
+        Object cast(long longValue) { return (new DateWritableV2((int)longValue)).get();}
       };
       abstract Object cast(long longValue);
     }
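
The swap above is representative of the whole patch: DateWritableV2 stores a plain
day count relative to the epoch and returns the new timezone-agnostic
org.apache.hadoop.hive.common.type.Date, where the old DateWritable produced a
java.sql.Date interpreted in the JVM default zone. A minimal sketch of the new cast
path (the constructor and get() are the calls used above; the sample day number is
illustrative):

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;

public class DateWritableV2Sketch {
  public static void main(String[] args) {
    DateWritableV2 w = new DateWritableV2(17710); // days since 1970-01-01
    Date d = w.get();                             // hive Date, not java.sql.Date
    System.out.println(d);                        // 2018-06-28 in every server zone
  }
}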

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
index 12af94e..d950991 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
@@ -18,8 +18,6 @@
 package org.apache.hadoop.hive.ql.optimizer.calcite.translator;
 
 import java.math.BigDecimal;
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.LinkedList;
 import java.util.List;
@@ -45,9 +43,11 @@ import org.apache.calcite.sql.type.SqlTypeUtil;
 import org.apache.calcite.util.DateString;
 import org.apache.calcite.util.TimeString;
 import org.apache.calcite.util.TimestampString;
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.Hive;

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
index 7cedab6..2ae015a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
@@ -46,12 +46,14 @@ import org.apache.calcite.util.ConversionUtil;
 import org.apache.calcite.util.DateString;
 import org.apache.calcite.util.NlsString;
 import org.apache.calcite.util.TimestampString;
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -99,14 +101,11 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 
 import java.math.BigDecimal;
 import java.math.BigInteger;
-import java.sql.Timestamp;
 import java.time.Instant;
 import java.util.ArrayList;
 import java.util.Calendar;
-import java.util.Date;
 import java.util.LinkedHashMap;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 
 public class RexNodeConverter {
@@ -394,8 +393,6 @@ public class RexNodeConverter {
       GenericUDF udf = func.getGenericUDF();
       if ((udf instanceof GenericUDFToChar) || (udf instanceof GenericUDFToVarchar)
           || (udf instanceof GenericUDFToDecimal) || (udf instanceof GenericUDFToDate)
-          // Calcite can not specify the scale for timestamp. As a result, all
-          // the millisecond part will be lost
           || (udf instanceof GenericUDFTimestamp) || (udf instanceof GenericUDFToTimestampLocalTZ)
           || (udf instanceof GenericUDFToBinary) || castExprUsingUDFBridge(udf)) {
         castExpr = cluster.getRexBuilder().makeAbstractCast(
@@ -701,9 +698,9 @@ public class RexNodeConverter {
       calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
       break;
     case DATE:
-      final Calendar cal = Calendar.getInstance(Locale.getDefault());
-      cal.setTime((Date) value);
-      calciteLiteral = rexBuilder.makeDateLiteral(DateString.fromCalendarFields(cal));
+      final Date date = (Date) value;
+      calciteLiteral = rexBuilder.makeDateLiteral(
+          DateString.fromDaysSinceEpoch(date.toEpochDay()));
       break;
     case TIMESTAMP:
       final TimestampString tsString;
@@ -711,9 +708,7 @@ public class RexNodeConverter {
         tsString = TimestampString.fromCalendarFields((Calendar) value);
       } else {
         final Timestamp ts = (Timestamp) value;
-        final Calendar calt = Calendar.getInstance(Locale.getDefault());
-        calt.setTimeInMillis(ts.getTime());
-        tsString = TimestampString.fromCalendarFields(calt).withNanos(ts.getNanos());
+        tsString = TimestampString.fromMillisSinceEpoch(ts.toEpochMilli()).withNanos(ts.getNanos());
       }
       // Must call makeLiteral, not makeTimestampLiteral
       // to have the RexBuilder.roundTime logic kick in
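
The two rewritten literal branches above replace Calendar-field conversion, which
read the value in the JVM default locale and zone, with direct epoch-based
factories, so the Calcite literal no longer shifts with the server's time zone. A
side-by-side sketch of the two paths (DateString and TimestampString are Calcite
utilities; the epoch factories are the ones the patch now calls):

import java.util.Calendar;
import java.util.Locale;
import org.apache.calcite.util.DateString;
import org.apache.calcite.util.TimestampString;

public class LiteralConversionSketch {
  public static void main(String[] args) {
    long millis = 1530200714000L;                 // 2018-06-28 15:45:14 UTC

    // Old path: fields are read in the default zone, so the rendered
    // literal can move by hours (or a day) between servers.
    Calendar cal = Calendar.getInstance(Locale.getDefault());
    cal.setTimeInMillis(millis);
    TimestampString zoneDependent = TimestampString.fromCalendarFields(cal);

    // New path: one fixed reading, independent of the environment.
    TimestampString fixed = TimestampString.fromMillisSinceEpoch(millis);
    DateString day = DateString.fromDaysSinceEpoch(17710);   // 2018-06-28

    System.out.println(zoneDependent + " / " + fixed + " / " + day);
  }
}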

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 596edde..ebea31d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hive.ql.parse;
 import java.io.IOException;
 import java.io.Serializable;
 import java.io.UnsupportedEncodingException;
-import java.sql.Date;
+import java.text.ParseException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -43,6 +43,7 @@ import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -60,7 +61,6 @@ import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryProperties;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.cache.results.CacheUsage;
-import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -96,7 +96,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCurrentTimestamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCurrentUser;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -109,9 +109,6 @@ import org.slf4j.LoggerFactory;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableList;
 
-import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_CATALOG_NAME;
-import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog;
-
 /**
  * BaseSemanticAnalyzer.
  *
@@ -2098,14 +2095,19 @@ public abstract class BaseSemanticAnalyzer {
   private static String normalizeDateCol(
       Object colValue, String originalColSpec) throws SemanticException {
     Date value;
-    if (colValue instanceof DateWritable) {
-      value = ((DateWritable) colValue).get(false); // Time doesn't matter.
+    if (colValue instanceof DateWritableV2) {
+      value = ((DateWritableV2) colValue).get(); // Time doesn't matter.
     } else if (colValue instanceof Date) {
       value = (Date) colValue;
     } else {
       throw new SemanticException("Unexpected date type " + colValue.getClass());
     }
-    return MetaStoreUtils.PARTITION_DATE_FORMAT.get().format(value);
+    try {
+      return MetaStoreUtils.PARTITION_DATE_FORMAT.get().format(
+          MetaStoreUtils.PARTITION_DATE_FORMAT.get().parse(value.toString()));
+    } catch (ParseException e) {
+      throw new SemanticException(e);
+    }
   }
 
   protected WriteEntity toWriteEntity(String location) throws SemanticException {
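
normalizeDateCol changes because the new Date is no longer a java.util.Date and
cannot be handed to the ThreadLocal DateFormat directly; the patch round-trips the
value's canonical string through the formatter instead. A sketch of that round
trip, assuming PARTITION_DATE_FORMAT uses the usual yyyy-MM-dd pattern (the
SimpleDateFormat below is a stand-in for it):

import java.text.ParseException;
import java.text.SimpleDateFormat;

public class PartitionDateSketch {
  public static void main(String[] args) throws ParseException {
    SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd"); // stand-in
    String normalized = fmt.format(fmt.parse("2018-6-28"));    // zero-pads fields
    System.out.println(normalized);                            // 2018-06-28
  }
}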

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
index 036ceca..fa941a1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
@@ -19,8 +19,7 @@
 package org.apache.hadoop.hive.ql.parse;
 
 import java.math.BigDecimal;
-import java.sql.Date;
-import java.sql.Timestamp;
+
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -33,10 +32,12 @@ import java.util.Stack;
 import org.apache.calcite.rel.RelNode;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang3.math.NumberUtils;
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.common.type.TimestampTZUtil;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.ErrorMsg;

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/stats/ColumnStatisticsObjTranslator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/ColumnStatisticsObjTranslator.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/ColumnStatisticsObjTranslator.java
index 607545d..31c9682 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/stats/ColumnStatisticsObjTranslator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/ColumnStatisticsObjTranslator.java
@@ -35,7 +35,7 @@ import org.apache.hadoop.hive.metastore.columnstats.cache.DoubleColumnStatsDataI
 import org.apache.hadoop.hive.metastore.columnstats.cache.LongColumnStatsDataInspector;
 import org.apache.hadoop.hive.metastore.columnstats.cache.StringColumnStatsDataInspector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
@@ -197,10 +197,10 @@ public class ColumnStatisticsObjTranslator {
       long v = ((LongObjectInspector) oi).get(o);
       statsObj.getStatsData().getDateStats().setNumDVs(v);
     } else if (fName.equals("max")) {
-      DateWritable v = ((DateObjectInspector) oi).getPrimitiveWritableObject(o);
+      DateWritableV2 v = ((DateObjectInspector) oi).getPrimitiveWritableObject(o);
       statsObj.getStatsData().getDateStats().setHighValue(new Date(v.getDays()));
     } else if (fName.equals("min")) {
-      DateWritable v = ((DateObjectInspector) oi).getPrimitiveWritableObject(o);
+      DateWritableV2 v = ((DateObjectInspector) oi).getPrimitiveWritableObject(o);
       statsObj.getStatsData().getDateStats().setLowValue(new Date(v.getDays()));
     } else if (fName.equals("ndvbitvector")) {
       PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java
index 21164b7..2fb8844 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java
@@ -25,10 +25,11 @@ import java.util.Iterator;
 import java.util.Map;
 import java.util.NoSuchElementException;
 
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.joda.time.Chronology;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
@@ -46,26 +47,22 @@ import com.google.common.collect.ImmutableMap;
 public abstract class UDFDateFloor extends UDF {
 
   private final QueryGranularity granularity;
-  private final TimestampWritable resultTS;
+  private final TimestampWritableV2 resultTS;
   private final TimestampLocalTZWritable resultTSLTZ;
 
   public UDFDateFloor(String granularity) {
     this.granularity = QueryGranularity.fromString(granularity);
-    this.resultTS = new TimestampWritable();
+    this.resultTS = new TimestampWritableV2();
     this.resultTSLTZ = new TimestampLocalTZWritable();
   }
 
-  public TimestampWritable evaluate(TimestampWritable t) {
+  public TimestampWritableV2 evaluate(TimestampWritableV2 t) {
     if (t == null) {
       return null;
     }
-    final long originalTimestamp = t.getTimestamp().getTime(); // default
-    final long originalTimestampUTC = new DateTime(originalTimestamp)
-        .withZoneRetainFields(DateTimeZone.UTC).getMillis(); // default -> utc
-    final long newTimestampUTC = granularity.truncate(originalTimestampUTC); // utc
-    final long newTimestamp = new DateTime(newTimestampUTC, DateTimeZone.UTC)
-        .withZoneRetainFields(DateTimeZone.getDefault()).getMillis(); // utc -> default
-    resultTS.setTime(newTimestamp);
+    final long originalTimestamp = t.getTimestamp().toEpochMilli();
+    final long newTimestamp = granularity.truncate(originalTimestamp);
+    resultTS.set(Timestamp.ofEpochMilli(newTimestamp));
     return resultTS;
   }
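
The old evaluate shifted millis from the default zone to UTC, truncated, and
shifted back; the new Timestamp already carries UTC-normalized epoch millis, so the
granularity truncation applies directly. A sketch of what such a truncation does,
using day flooring as the example (QueryGranularity itself stays internal to the
UDF):

public class FloorSketch {
  static long floorToDay(long epochMillis) {
    long day = 24L * 60 * 60 * 1000;
    return Math.floorDiv(epochMillis, day) * day;  // also correct before 1970
  }

  public static void main(String[] args) {
    long ts = 1530200714000L;               // 2018-06-28 15:45:14 UTC
    System.out.println(floorToDay(ts));     // 1530144000000 = 2018-06-28 00:00 UTC
  }
}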
 

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
index f774954..72fa263 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
@@ -18,23 +18,26 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthTimestamp;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
+
+import java.util.Calendar;
+import java.util.TimeZone;
 
 /**
  * UDFDayOfMonth.
@@ -51,66 +54,81 @@ import org.apache.hadoop.io.Text;
     + "  > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + "  30")
 @VectorizedExpressions({VectorUDFDayOfMonthDate.class, VectorUDFDayOfMonthString.class, VectorUDFDayOfMonthTimestamp.class})
 @NDV(maxNdv = 31)
-public class UDFDayOfMonth extends UDF {
-  private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
-  private final Calendar calendar = Calendar.getInstance();
-
-  private final IntWritable result = new IntWritable();
+public class UDFDayOfMonth extends GenericUDF {
 
-  public UDFDayOfMonth() {
-  }
+  private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
+  private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
+  private final IntWritable output = new IntWritable();
 
-  /**
-   * Get the day of month from a date string.
-   *
-   * @param dateString
-   *          the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
-   *          "yyyy-MM-dd".
-   * @return an int from 1 to 31. null if the dateString is not a valid date
-   *         string.
-   */
-  public IntWritable evaluate(Text dateString) {
+  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
 
-    if (dateString == null) {
-      return null;
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    checkArgsSize(arguments, 1, 1);
+    checkArgPrimitive(arguments, 0);
+    switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) {
+      case INTERVAL_DAY_TIME:
+        inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_DAY_TIME;
+        converters[0] = ObjectInspectorConverters.getConverter(
+            arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector);
+        break;
+      case STRING:
+      case CHAR:
+      case VARCHAR:
+      case DATE:
+      case TIMESTAMP:
+      case TIMESTAMPLOCALTZ:
+      case VOID:
+        obtainDateConverter(arguments, 0, inputTypes, converters);
+        break;
+      default:
+        // build error message
+        StringBuilder sb = new StringBuilder();
+        sb.append(getFuncName());
+        sb.append(" does not take ");
+        sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
+        sb.append(" type");
+        throw new UDFArgumentTypeException(0, sb.toString());
     }
 
-    try {
-      Date date = formatter.parse(dateString.toString());
-      calendar.setTime(date);
-      result.set(calendar.get(Calendar.DAY_OF_MONTH));
-      return result;
-    } catch (ParseException e) {
-      return null;
-    }
+    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
+    return outputOI;
   }
 
-  public IntWritable evaluate(DateWritable d) {
-    if (d == null) {
-      return null;
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    switch (inputTypes[0]) {
+      case INTERVAL_DAY_TIME:
+        HiveIntervalDayTime intervalDayTime = getIntervalDayTimeValue(arguments, 0, inputTypes, converters);
+        if (intervalDayTime == null) {
+          return null;
+        }
+        output.set(intervalDayTime.getDays());
+        break;
+      case STRING:
+      case CHAR:
+      case VARCHAR:
+      case DATE:
+      case TIMESTAMP:
+      case TIMESTAMPLOCALTZ:
+      case VOID:
+        Date date = getDateValue(arguments, 0, inputTypes, converters);
+        if (date == null) {
+          return null;
+        }
+        calendar.setTimeInMillis(date.toEpochMilli());
+        output.set(calendar.get(Calendar.DAY_OF_MONTH));
     }
-
-    calendar.setTime(d.get(false)); // Time doesn't matter.
-    result.set(calendar.get(Calendar.DAY_OF_MONTH));
-    return result;
+    return output;
   }
 
-  public IntWritable evaluate(TimestampWritable t) {
-    if (t == null) {
-      return null;
-    }
-
-    calendar.setTime(t.getTimestamp());
-    result.set(calendar.get(Calendar.DAY_OF_MONTH));
-    return result;
+  @Override
+  protected String getFuncName() {
+    return "day";
   }
 
-  public IntWritable evaluate(HiveIntervalDayTimeWritable i) {
-    if (i == null) {
-      return null;
-    }
-
-    result.set(i.getHiveIntervalDayTime().getDays());
-    return result;
+  @Override
+  public String getDisplayString(String[] children) {
+    return getStandardDisplayString(getFuncName(), children);
   }
 }
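
The rewrite from UDF to GenericUDF is the template the following field extractors
also follow: initialize() routes the argument through a converter (intervals keep
their own path, everything date-like becomes the new Date or Timestamp), and
evaluate() reads the field with a Calendar pinned to UTC, so the result matches the
value's own fields rather than the server zone. The core of the date branch, as a
standalone sketch:

import java.util.Calendar;
import java.util.TimeZone;
import org.apache.hadoop.hive.common.type.Date;

public class DayOfMonthSketch {
  public static void main(String[] args) {
    Calendar utc = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
    Date date = Date.valueOf("2009-07-30");
    utc.setTimeInMillis(date.toEpochMilli());
    System.out.println(utc.get(Calendar.DAY_OF_MONTH)); // 30 in every zone
  }
}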

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java
index 88e6d94..defa9d1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java
@@ -18,11 +18,7 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
@@ -30,8 +26,8 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfWeekDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfWeekString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfWeekTimestamp;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 
@@ -51,8 +47,6 @@ import org.apache.hadoop.io.Text;
 @VectorizedExpressions({VectorUDFDayOfWeekDate.class, VectorUDFDayOfWeekString.class, VectorUDFDayOfWeekTimestamp.class})
 @NDV(maxNdv = 7)
 public class UDFDayOfWeek extends UDF {
-  private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
-  private final Calendar calendar = Calendar.getInstance();
 
   private final IntWritable result = new IntWritable();
 
@@ -73,32 +67,29 @@ public class UDFDayOfWeek extends UDF {
       return null;
     }
     try {
-      Date date = formatter.parse(dateString.toString());
-      calendar.setTime(date);
-      result.set(calendar.get(Calendar.DAY_OF_WEEK));
+      Date date = Date.valueOf(dateString.toString());
+      result.set(date.getDayOfWeek());
       return result;
-    } catch (ParseException e) {
+    } catch (IllegalArgumentException e) {
       return null;
     }
   }
 
-  public IntWritable evaluate(DateWritable d) {
+  public IntWritable evaluate(DateWritableV2 d) {
     if (d == null) {
       return null;
     }
 
-    calendar.setTime(d.get(false)); // Time doesn't matter.
-    result.set(calendar.get(Calendar.DAY_OF_WEEK));
+    result.set(d.get().getDayOfWeek());
     return result;
   }
 
-  public IntWritable evaluate(TimestampWritable t) {
+  public IntWritable evaluate(TimestampWritableV2 t) {
     if (t == null) {
       return null;
     }
 
-    calendar.setTime(t.getTimestamp());
-    result.set(calendar.get(Calendar.DAY_OF_WEEK));
+    result.set(t.getTimestamp().getDayOfWeek());
     return result;
   }
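
Here the Calendar disappears entirely: the new Date parses its own string form
(valueOf throws IllegalArgumentException, hence the changed catch clause) and
exposes the day of week directly, with the same 1=Sunday..7=Saturday encoding as
the Calendar.DAY_OF_WEEK call it replaces above. A one-line sketch:

import org.apache.hadoop.hive.common.type.Date;

public class DayOfWeekSketch {
  public static void main(String[] args) {
    // 2018-06-28 was a Thursday, so this prints 5 (Calendar-style).
    System.out.println(Date.valueOf("2018-06-28").getDayOfWeek());
  }
}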
 

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java
index 8f531fd..3cee0c1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.udf;
 
 import java.text.SimpleDateFormat;
 import java.util.Date;
+import java.util.TimeZone;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
@@ -119,6 +120,7 @@ public class UDFFromUnixTime extends UDF {
   private Text eval(long unixtime, Text format) {
     if (!format.equals(lastFormat)) {
       formatter = new SimpleDateFormat(format.toString());
+      formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
       lastFormat.set(format);
     }
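
This one-line change pins from_unixtime's formatter to UTC; without it,
SimpleDateFormat renders epoch seconds in the JVM default zone and the same
unixtime prints differently on differently-configured servers. The behavior, as a
self-contained sketch:

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class FromUnixTimeSketch {
  public static void main(String[] args) {
    SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    f.setTimeZone(TimeZone.getTimeZone("UTC"));
    System.out.println(f.format(new Date(1530200714L * 1000)));
    // 2018-06-28 15:45:14, regardless of the server's default zone
  }
}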
 

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
index a0c4e96..f906f36 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
@@ -18,22 +18,26 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourTimestamp;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
+
+import java.util.Calendar;
+import java.util.TimeZone;
 
 /**
  * UDFHour.
@@ -51,62 +55,82 @@ import org.apache.hadoop.io.Text;
     + "  > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + "  12")
 @VectorizedExpressions({VectorUDFHourDate.class, VectorUDFHourString.class, VectorUDFHourTimestamp.class})
 @NDV(maxNdv = 24)
-public class UDFHour extends UDF {
-  private final SimpleDateFormat formatter1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-  private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss");
-  private final Calendar calendar = Calendar.getInstance();
+public class UDFHour extends GenericUDF {
 
-  private final IntWritable result = new IntWritable();
+  private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
+  private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
+  private final IntWritable output = new IntWritable();
 
-  public UDFHour() {
-  }
+  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
 
-  /**
-   * Get the hour from a date string.
-   *
-   * @param dateString
-   *          the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
-   *          "yyyy-MM-dd".
-   * @return an int from 0 to 23. null if the dateString is not a valid date
-   *         string.
-   */
-  public IntWritable evaluate(Text dateString) {
 
-    if (dateString == null) {
-      return null;
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    checkArgsSize(arguments, 1, 1);
+    checkArgPrimitive(arguments, 0);
+    switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) {
+      case INTERVAL_DAY_TIME:
+        inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_DAY_TIME;
+        converters[0] = ObjectInspectorConverters.getConverter(
+            arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector);
+        break;
+      case STRING:
+      case CHAR:
+      case VARCHAR:
+      case DATE:
+      case TIMESTAMP:
+      case TIMESTAMPLOCALTZ:
+      case VOID:
+        obtainTimestampConverter(arguments, 0, inputTypes, converters);
+        break;
+      default:
+        // build error message
+        StringBuilder sb = new StringBuilder();
+        sb.append(getFuncName());
+        sb.append(" does not take ");
+        sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
+        sb.append(" type");
+        throw new UDFArgumentTypeException(0, sb.toString());
     }
 
-    try {
-      Date date = null;
-      try {
-        date = formatter1.parse(dateString.toString());
-      } catch (ParseException e) {
-        date = formatter2.parse(dateString.toString());
-      }
-      calendar.setTime(date);
-      result.set(calendar.get(Calendar.HOUR_OF_DAY));
-      return result;
-    } catch (ParseException e) {
-      return null;
-    }
+    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
+    return outputOI;
   }
 
-  public IntWritable evaluate(TimestampWritable t) {
-    if (t == null) {
-      return null;
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    switch (inputTypes[0]) {
+      case INTERVAL_DAY_TIME:
+        HiveIntervalDayTime intervalDayTime = getIntervalDayTimeValue(arguments, 0, inputTypes, converters);
+        if (intervalDayTime == null) {
+          return null;
+        }
+        output.set(intervalDayTime.getHours());
+        break;
+      case STRING:
+      case CHAR:
+      case VARCHAR:
+      case DATE:
+      case TIMESTAMP:
+      case TIMESTAMPLOCALTZ:
+      case VOID:
+        Timestamp ts = getTimestampValue(arguments, 0, converters);
+        if (ts == null) {
+          return null;
+        }
+        calendar.setTimeInMillis(ts.toEpochMilli());
+        output.set(calendar.get(Calendar.HOUR_OF_DAY));
     }
-
-    calendar.setTime(t.getTimestamp());
-    result.set(calendar.get(Calendar.HOUR_OF_DAY));
-    return result;
+    return output;
   }
 
-  public IntWritable evaluate(HiveIntervalDayTimeWritable i) {
-    if (i == null) {
-      return null;
-    }
+  @Override
+  protected String getFuncName() {
+    return "hour";
+  }
 
-    result.set(i.getHiveIntervalDayTime().getHours());
-    return result;
+  @Override
+  public String getDisplayString(String[] children) {
+    return getStandardDisplayString(getFuncName(), children);
   }
 }
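
hour(), like minute() and second() below, follows the same GenericUDF template but
converts its argument to the new Timestamp (obtainTimestampConverter) before
reading the field from a UTC calendar. Because the new Timestamp is UTC-normalized,
the calendar recovers exactly the wall-clock fields the value was written with; a
sketch of that branch (ofEpochMilli and toEpochMilli are the factory and accessor
the patch uses):

import java.util.Calendar;
import java.util.TimeZone;
import org.apache.hadoop.hive.common.type.Timestamp;

public class HourSketch {
  public static void main(String[] args) {
    Calendar utc = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
    Timestamp ts = Timestamp.ofEpochMilli(1530200714000L); // 15:45:14 UTC
    utc.setTimeInMillis(ts.toEpochMilli());
    System.out.println(utc.get(Calendar.HOUR_OF_DAY));     // 15, on any server
  }
}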

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
index 306d458..2e62173 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
@@ -18,22 +18,26 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteTimestamp;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
+
+import java.util.Calendar;
+import java.util.TimeZone;
 
 /**
  * UDFMinute.
@@ -51,62 +55,82 @@ import org.apache.hadoop.io.Text;
     + "  > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + "  58")
 @VectorizedExpressions({VectorUDFMinuteDate.class, VectorUDFMinuteString.class, VectorUDFMinuteTimestamp.class})
 @NDV(maxNdv = 60)
-public class UDFMinute extends UDF {
-  private final SimpleDateFormat formatter1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-  private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss");
-  private final Calendar calendar = Calendar.getInstance();
+public class UDFMinute extends GenericUDF {
 
-  private final IntWritable result = new IntWritable();
+  private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
+  private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
+  private final IntWritable output = new IntWritable();
 
-  public UDFMinute() {
-  }
+  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
 
-  /**
-   * Get the minute from a date string.
-   *
-   * @param dateString
-   *          the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
-   *          "yyyy-MM-dd".
-   * @return an int from 0 to 59. null if the dateString is not a valid date
-   *         string.
-   */
-  public IntWritable evaluate(Text dateString) {
 
-    if (dateString == null) {
-      return null;
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    checkArgsSize(arguments, 1, 1);
+    checkArgPrimitive(arguments, 0);
+    switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) {
+      case INTERVAL_DAY_TIME:
+        inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_DAY_TIME;
+        converters[0] = ObjectInspectorConverters.getConverter(
+            arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector);
+        break;
+      case STRING:
+      case CHAR:
+      case VARCHAR:
+      case DATE:
+      case TIMESTAMP:
+      case TIMESTAMPLOCALTZ:
+      case VOID:
+        obtainTimestampConverter(arguments, 0, inputTypes, converters);
+        break;
+      default:
+        // build error message
+        StringBuilder sb = new StringBuilder();
+        sb.append(getFuncName());
+        sb.append(" does not take ");
+        sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
+        sb.append(" type");
+        throw new UDFArgumentTypeException(0, sb.toString());
     }
 
-    try {
-      Date date = null;
-      try {
-        date = formatter1.parse(dateString.toString());
-      } catch (ParseException e) {
-        date = formatter2.parse(dateString.toString());
-      }
-      calendar.setTime(date);
-      result.set(calendar.get(Calendar.MINUTE));
-      return result;
-    } catch (ParseException e) {
-      return null;
-    }
+    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
+    return outputOI;
   }
 
-  public IntWritable evaluate(TimestampWritable t) {
-    if (t == null) {
-      return null;
+  @Override
+  public Object evaluate(GenericUDF.DeferredObject[] arguments) throws HiveException {
+    switch (inputTypes[0]) {
+      case INTERVAL_DAY_TIME:
+        HiveIntervalDayTime intervalDayTime = getIntervalDayTimeValue(arguments, 0, inputTypes, converters);
+        if (intervalDayTime == null) {
+          return null;
+        }
+        output.set(intervalDayTime.getMinutes());
+        break;
+      case STRING:
+      case CHAR:
+      case VARCHAR:
+      case DATE:
+      case TIMESTAMP:
+      case TIMESTAMPLOCALTZ:
+      case VOID:
+        Timestamp ts = getTimestampValue(arguments, 0, converters);
+        if (ts == null) {
+          return null;
+        }
+        calendar.setTimeInMillis(ts.toEpochMilli());
+        output.set(calendar.get(Calendar.MINUTE));
     }
-
-    calendar.setTime(t.getTimestamp());
-    result.set(calendar.get(Calendar.MINUTE));
-    return result;
+    return output;
   }
 
-  public IntWritable evaluate(HiveIntervalDayTimeWritable i) {
-    if (i == null) {
-      return null;
-    }
+  @Override
+  protected String getFuncName() {
+    return "minute";
+  }
 
-    result.set(i.getHiveIntervalDayTime().getMinutes());
-    return result;
+  @Override
+  public String getDisplayString(String[] children) {
+    return getStandardDisplayString(getFuncName(), children);
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
index 7995934..1a8b2da 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
@@ -18,23 +18,26 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthTimestamp;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
+
+import java.util.Calendar;
+import java.util.TimeZone;
 
 /**
  * UDFMonth.
@@ -51,64 +54,82 @@ import org.apache.hadoop.io.Text;
     + "  > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + "  7")
 @VectorizedExpressions({VectorUDFMonthDate.class, VectorUDFMonthString.class, VectorUDFMonthTimestamp.class})
 @NDV(maxNdv = 31)
-public class UDFMonth extends UDF {
-  private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
-  private final Calendar calendar = Calendar.getInstance();
+public class UDFMonth extends GenericUDF {
 
-  private final IntWritable result = new IntWritable();
+  private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
+  private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
+  private final IntWritable output = new IntWritable();
 
-  public UDFMonth() {
-  }
+  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
 
-  /**
-   * Get the month from a date string.
-   *
-   * @param dateString
-   *          the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
-   *          "yyyy-MM-dd".
-   * @return an int from 1 to 12. null if the dateString is not a valid date
-   *         string.
-   */
-  public IntWritable evaluate(Text dateString) {
-    if (dateString == null) {
-      return null;
-    }
-    try {
-      Date date = formatter.parse(dateString.toString());
-      calendar.setTime(date);
-      result.set(1 + calendar.get(Calendar.MONTH));
-      return result;
-    } catch (ParseException e) {
-      return null;
-    }
-  }
 
-  public IntWritable evaluate(DateWritable d) {
-    if (d == null) {
-      return null;
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    checkArgsSize(arguments, 1, 1);
+    checkArgPrimitive(arguments, 0);
+    switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) {
+      case INTERVAL_YEAR_MONTH:
+        inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_YEAR_MONTH;
+        converters[0] = ObjectInspectorConverters.getConverter(
+            arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector);
+        break;
+      case STRING:
+      case CHAR:
+      case VARCHAR:
+      case DATE:
+      case TIMESTAMP:
+      case TIMESTAMPLOCALTZ:
+      case VOID:
+        obtainDateConverter(arguments, 0, inputTypes, converters);
+        break;
+      default:
+        // build error message
+        StringBuilder sb = new StringBuilder();
+        sb.append(getFuncName());
+        sb.append(" does not take ");
+        sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
+        sb.append(" type");
+        throw new UDFArgumentTypeException(0, sb.toString());
     }
 
-    calendar.setTime(d.get(false));  // Time doesn't matter.
-    result.set(1 + calendar.get(Calendar.MONTH));
-    return result;
+    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
+    return outputOI;
   }
 
-  public IntWritable evaluate(TimestampWritable t) {
-    if (t == null) {
-      return null;
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    switch (inputTypes[0]) {
+      case INTERVAL_YEAR_MONTH:
+        HiveIntervalYearMonth intervalYearMonth = getIntervalYearMonthValue(arguments, 0, inputTypes, converters);
+        if (intervalYearMonth == null) {
+          return null;
+        }
+        output.set(intervalYearMonth.getMonths());
+        break;
+      case STRING:
+      case CHAR:
+      case VARCHAR:
+      case DATE:
+      case TIMESTAMP:
+      case TIMESTAMPLOCALTZ:
+      case VOID:
+        Date date = getDateValue(arguments, 0, inputTypes, converters);
+        if (date == null) {
+          return null;
+        }
+        calendar.setTimeInMillis(date.toEpochMilli());
+        output.set(1 + calendar.get(Calendar.MONTH));
     }
-
-    calendar.setTime(t.getTimestamp());
-    result.set(1 + calendar.get(Calendar.MONTH));
-    return result;
+    return output;
   }
 
-  public IntWritable evaluate(HiveIntervalYearMonthWritable i) {
-    if (i == null) {
-      return null;
-    }
+  @Override
+  protected String getFuncName() {
+    return "month";
+  }
 
-    result.set(i.getHiveIntervalYearMonth().getMonths());
-    return result;
+  @Override
+  public String getDisplayString(String[] children) {
+    return getStandardDisplayString(getFuncName(), children);
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
index 5bf8b24..31a8529 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
@@ -18,25 +18,27 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondTimestamp;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hive.common.util.DateUtils;
+
+import java.util.Calendar;
+import java.util.TimeZone;
+
 
 /**
  * UDFSecond.
@@ -54,64 +56,82 @@ import org.apache.hive.common.util.DateUtils;
     + "  > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + "  59")
 @VectorizedExpressions({VectorUDFSecondDate.class, VectorUDFSecondString.class, VectorUDFSecondTimestamp.class})
 @NDV(maxNdv = 60)
-public class UDFSecond extends UDF {
-  private final SimpleDateFormat formatter1 = new SimpleDateFormat(
-      "yyyy-MM-dd HH:mm:ss");
-  private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss");
-  private final Calendar calendar = Calendar.getInstance();
+public class UDFSecond extends GenericUDF {
 
-  private final IntWritable result = new IntWritable();
+  private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
+  private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
+  private final IntWritable output = new IntWritable();
 
-  public UDFSecond() {
-  }
+  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
 
-  /**
-   * Get the minute from a date string.
-   *
-   * @param dateString
-   *          the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
-   *          "yyyy-MM-dd".
-   * @return an int from 0 to 59. null if the dateString is not a valid date
-   *         string.
-   */
-  public IntWritable evaluate(Text dateString) {
 
-    if (dateString == null) {
-      return null;
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    checkArgsSize(arguments, 1, 1);
+    checkArgPrimitive(arguments, 0);
+    switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) {
+      case INTERVAL_DAY_TIME:
+        inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_DAY_TIME;
+        converters[0] = ObjectInspectorConverters.getConverter(
+            arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector);
+        break;
+      case STRING:
+      case CHAR:
+      case VARCHAR:
+      case DATE:
+      case TIMESTAMP:
+      case TIMESTAMPLOCALTZ:
+      case VOID:
+        obtainTimestampConverter(arguments, 0, inputTypes, converters);
+        break;
+      default:
+        // build error message
+        StringBuilder sb = new StringBuilder();
+        sb.append(getFuncName());
+        sb.append(" does not take ");
+        sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
+        sb.append(" type");
+        throw new UDFArgumentTypeException(0, sb.toString());
     }
 
-    try {
-      Date date = null;
-      try {
-        date = formatter1.parse(dateString.toString());
-      } catch (ParseException e) {
-        date = formatter2.parse(dateString.toString());
-      }
-      calendar.setTime(date);
-      result.set(calendar.get(Calendar.SECOND));
-      return result;
-    } catch (ParseException e) {
-      return null;
-    }
+    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
+    return outputOI;
   }
 
-  public IntWritable evaluate(TimestampWritable t) {
-    if (t == null) {
-      return null;
+  @Override
+  public Object evaluate(GenericUDF.DeferredObject[] arguments) throws HiveException {
+    switch (inputTypes[0]) {
+      case INTERVAL_DAY_TIME:
+        HiveIntervalDayTime intervalDayTime = getIntervalDayTimeValue(arguments, 0, inputTypes, converters);
+        if (intervalDayTime == null) {
+          return null;
+        }
+        output.set(intervalDayTime.getSeconds());
+        break;
+      case STRING:
+      case CHAR:
+      case VARCHAR:
+      case DATE:
+      case TIMESTAMP:
+      case TIMESTAMPLOCALTZ:
+      case VOID:
+        Timestamp ts = getTimestampValue(arguments, 0, converters);
+        if (ts == null) {
+          return null;
+        }
+        calendar.setTimeInMillis(ts.toEpochMilli());
+        output.set(calendar.get(Calendar.SECOND));
     }
-
-    calendar.setTime(t.getTimestamp());
-    result.set(calendar.get(Calendar.SECOND));
-    return result;
+    return output;
   }
 
-  public IntWritable evaluate(HiveIntervalDayTimeWritable i) {
-    if (i == null) {
-      return null;
-    }
+  @Override
+  protected String getFuncName() {
+    return "second";
+  }
 
-    HiveIntervalDayTime idt = i.getHiveIntervalDayTime();
-    result.set(idt.getSeconds());
-    return result;
+  @Override
+  public String getDisplayString(String[] children) {
+    return getStandardDisplayString(getFuncName(), children);
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
index 3ac7a06..a7f4bf1 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
@@ -30,11 +30,11 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastLongToBooleanVi
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDateToBoolean;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToBoolean;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -179,12 +179,12 @@ public class UDFToBoolean extends UDF {
     return booleanWritable;
   }
 
-  public BooleanWritable evaluate(DateWritable d) {
+  public BooleanWritable evaluate(DateWritableV2 d) {
     // date value to boolean doesn't make any sense.
     return null;
   }
 
-  public BooleanWritable evaluate(TimestampWritable i) {
+  public BooleanWritable evaluate(TimestampWritableV2 i) {
     if (i == null) {
       return null;
     } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
index 1128b32..8f4ec3b 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.lazy.LazyByte;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
 import org.apache.hadoop.io.BooleanWritable;
@@ -183,7 +183,7 @@ public class UDFToByte extends UDF {
     }
   }
 
-  public ByteWritable evaluate(TimestampWritable i) {
+  public ByteWritable evaluate(TimestampWritableV2 i) {
     if (i == null) {
       return null;
     } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
index a8de3d5..7a01452 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -180,7 +180,7 @@ public class UDFToDouble extends UDF {
     }
   }
 
-  public DoubleWritable evaluate(TimestampWritable i) {
+  public DoubleWritable evaluate(TimestampWritableV2 i) {
     if (i == null) {
       return null;
     } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
index 2872ff2..451b45f 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -181,7 +181,7 @@ public class UDFToFloat extends UDF {
     }
   }
 
-  public FloatWritable evaluate(TimestampWritable i) {
+  public FloatWritable evaluate(TimestampWritableV2 i) {
     if (i == null) {
       return null;
     } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
index 748a688..4fe9c32 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
 import org.apache.hadoop.io.BooleanWritable;
@@ -193,7 +193,7 @@ public class UDFToInteger extends UDF {
    *          The Timestamp value to convert
    * @return IntWritable
    */
-  public IntWritable evaluate(TimestampWritable i) {
+  public IntWritable evaluate(TimestampWritableV2 i) {
     if (i == null) {
       return null;
     } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
index e286652..b31eeb0 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.lazy.LazyLong;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
 import org.apache.hadoop.io.BooleanWritable;
@@ -195,7 +195,7 @@ public class UDFToLong extends UDF {
     }
   }
 
-  public LongWritable evaluate(TimestampWritable i) {
+  public LongWritable evaluate(TimestampWritableV2 i) {
     if (i == null) {
       return null;
     } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
index e003ff3..315789c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastStringToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToLong;
@@ -29,7 +28,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.lazy.LazyShort;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
 import org.apache.hadoop.io.BooleanWritable;
@@ -185,7 +184,7 @@ public class UDFToShort extends UDF {
     }
   }
 
-  public ShortWritable evaluate(TimestampWritable i) {
+  public ShortWritable evaluate(TimestampWritableV2 i) {
     if (i == null) {
       return null;
     } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
index 557cb1c..a16d429 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
@@ -21,12 +21,12 @@ package org.apache.hadoop.hive.ql.udf;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
 import org.apache.hadoop.hive.serde2.lazy.LazyLong;
 import org.apache.hadoop.io.BooleanWritable;
@@ -135,7 +135,7 @@ public class UDFToString extends UDF {
       return i;
   }
 
-  public Text evaluate(DateWritable d) {
+  public Text evaluate(DateWritableV2 d) {
     if (d == null) {
       return null;
     } else {
@@ -144,7 +144,7 @@ public class UDFToString extends UDF {
     }
   }
 
-  public Text evaluate(TimestampWritable i) {
+  public Text evaluate(TimestampWritableV2 i) {
     if (i == null) {
       return null;
     } else {

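The UDFToBoolean through UDFToString hunks above are mechanical signature swaps: each evaluate overload now takes DateWritableV2/TimestampWritableV2 instead of the java.sql-backed writables, with the method bodies elided from this diff. A hedged sketch of the shape those overloads share, null in / null out, value otherwise; treating the integral casts as "seconds since epoch" is an assumption carried over from the pre-V2 behavior:

import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
import org.apache.hadoop.io.LongWritable;

public class TimestampCastSketch {
  private final LongWritable longWritable = new LongWritable();

  public LongWritable evaluate(TimestampWritableV2 i) {
    if (i == null) {
      return null;  // SQL NULL propagates
    }
    // Epoch millis via the new Hive Timestamp, truncated to whole seconds.
    longWritable.set(i.getTimestamp().toEpochMilli() / 1000);
    return longWritable;
  }
}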
http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
index 18ed52d..18ca9a7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
@@ -18,11 +18,11 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
 import java.util.Calendar;
-import java.util.Date;
+import java.util.TimeZone;
 
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
@@ -30,8 +30,8 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFWeekOfYearDate
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFWeekOfYearString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFWeekOfYearTimestamp;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 
@@ -49,11 +49,12 @@ import org.apache.hadoop.io.Text;
 @VectorizedExpressions({VectorUDFWeekOfYearDate.class, VectorUDFWeekOfYearString.class, VectorUDFWeekOfYearTimestamp.class})
 @NDV(maxNdv = 52)
 public class UDFWeekOfYear extends UDF {
-  private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
-  private final Calendar calendar = Calendar.getInstance();
 
   private final IntWritable result = new IntWritable();
 
+  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+
+
   public UDFWeekOfYear() {
     calendar.setFirstDayOfWeek(Calendar.MONDAY);
     calendar.setMinimalDaysInFirstWeek(4);
@@ -73,31 +74,32 @@ public class UDFWeekOfYear extends UDF {
       return null;
     }
     try {
-      Date date = formatter.parse(dateString.toString());
-      calendar.setTime(date);
+      Date date = Date.valueOf(dateString.toString());
+      calendar.setTimeInMillis(date.toEpochMilli());
       result.set(calendar.get(Calendar.WEEK_OF_YEAR));
       return result;
-    } catch (ParseException e) {
+    } catch (IllegalArgumentException e) {
       return null;
     }
   }
 
-  public IntWritable evaluate(DateWritable d) {
+  public IntWritable evaluate(DateWritableV2 d) {
     if (d == null) {
       return null;
     }
-
-    calendar.setTime(d.get(false));  // Time doesn't matter.
+    Date date = d.get();
+    calendar.setTimeInMillis(date.toEpochMilli());
     result.set(calendar.get(Calendar.WEEK_OF_YEAR));
     return result;
   }
 
-  public IntWritable evaluate(TimestampWritable t) {
+  public IntWritable evaluate(TimestampWritableV2 t) {
     if (t == null) {
       return null;
     }
 
-    calendar.setTime(t.getTimestamp());
+    Timestamp ts = t.getTimestamp();
+    calendar.setTimeInMillis(ts.toEpochMilli());
     result.set(calendar.get(Calendar.WEEK_OF_YEAR));
     return result;
   }

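UDFWeekOfYear keeps its UDF-style overloads but swaps SimpleDateFormat parsing for Date.valueOf (which throws IllegalArgumentException rather than ParseException on bad input) and moves the Calendar to UTC while preserving the ISO-8601 week rules set in the constructor. A small sketch of the same computation outside the UDF, using only calls that appear in the hunk:

import java.util.Calendar;
import java.util.TimeZone;

import org.apache.hadoop.hive.common.type.Date;

public class WeekOfYearSketch {
  public static void main(String[] args) {
    // ISO-8601 weeks: weeks start on Monday, and week 1 is the first week
    // containing at least 4 days of the new year.
    Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
    calendar.setFirstDayOfWeek(Calendar.MONDAY);
    calendar.setMinimalDaysInFirstWeek(4);

    Date date = Date.valueOf("2009-01-01");  // a Thursday
    calendar.setTimeInMillis(date.toEpochMilli());
    System.out.println(calendar.get(Calendar.WEEK_OF_YEAR));  // 1
  }
}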
http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
index 8417591..fcbb57f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
@@ -18,23 +18,26 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearTimestamp;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
+
+import java.util.Calendar;
+import java.util.TimeZone;
 
 /**
  * UDFYear.
@@ -51,66 +54,82 @@ import org.apache.hadoop.io.Text;
     + "  > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + "  2009")
 @VectorizedExpressions({VectorUDFYearDate.class, VectorUDFYearString.class, VectorUDFYearTimestamp.class})
 @NDV(maxNdv = 20) // although technically it's unbounded, it's unlikely we will ever see ndv > 20
-public class UDFYear extends UDF {
-  private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
-  private final Calendar calendar = Calendar.getInstance();
+public class UDFYear extends GenericUDF {
 
-  private final IntWritable result = new IntWritable();
+  private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
+  private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
+  private final IntWritable output = new IntWritable();
 
-  public UDFYear() {
-  }
+  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
 
-  /**
-   * Get the year from a date string.
-   *
-   * @param dateString
-   *          the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
-   *          "yyyy-MM-dd".
-   * @return an int from 1 to 12. null if the dateString is not a valid date
-   *         string.
-   */
-  public IntWritable evaluate(Text dateString) {
 
-    if (dateString == null) {
-      return null;
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    checkArgsSize(arguments, 1, 1);
+    checkArgPrimitive(arguments, 0);
+    switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) {
+      case INTERVAL_YEAR_MONTH:
+        inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_YEAR_MONTH;
+        converters[0] = ObjectInspectorConverters.getConverter(
+            arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector);
+        break;
+      case STRING:
+      case CHAR:
+      case VARCHAR:
+      case DATE:
+      case TIMESTAMP:
+      case TIMESTAMPLOCALTZ:
+      case VOID:
+        obtainDateConverter(arguments, 0, inputTypes, converters);
+        break;
+      default:
+        // build error message
+        StringBuilder sb = new StringBuilder();
+        sb.append(getFuncName());
+        sb.append(" does not take ");
+        sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
+        sb.append(" type");
+        throw new UDFArgumentTypeException(0, sb.toString());
     }
 
-    try {
-      Date date = formatter.parse(dateString.toString());
-      calendar.setTime(date);
-      result.set(calendar.get(Calendar.YEAR));
-      return result;
-    } catch (ParseException e) {
-      return null;
-    }
+    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
+    return outputOI;
   }
 
-  public IntWritable evaluate(DateWritable d) {
-    if (d == null) {
-      return null;
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    switch (inputTypes[0]) {
+      case INTERVAL_YEAR_MONTH:
+        HiveIntervalYearMonth intervalYearMonth = getIntervalYearMonthValue(arguments, 0, inputTypes, converters);
+        if (intervalYearMonth == null) {
+          return null;
+        }
+        output.set(intervalYearMonth.getYears());
+        break;
+      case STRING:
+      case CHAR:
+      case VARCHAR:
+      case DATE:
+      case TIMESTAMP:
+      case TIMESTAMPLOCALTZ:
+      case VOID:
+        Date date = getDateValue(arguments, 0, inputTypes, converters);
+        if (date == null) {
+          return null;
+        }
+        calendar.setTimeInMillis(date.toEpochMilli());
+        output.set(calendar.get(Calendar.YEAR));
     }
-
-    calendar.setTime(d.get(false));  // Time doesn't matter.
-    result.set(calendar.get(Calendar.YEAR));
-    return result;
+    return output;
   }
 
-  public IntWritable evaluate(TimestampWritable t) {
-    if (t == null) {
-      return null;
-    }
-
-    calendar.setTime(t.getTimestamp());
-    result.set(calendar.get(Calendar.YEAR));
-    return result;
+  @Override
+  protected String getFuncName() {
+    return "year";
   }
 
-  public IntWritable evaluate(HiveIntervalYearMonthWritable i) {
-    if (i == null) {
-      return null;
-    }
-
-    result.set(i.getHiveIntervalYearMonth().getYears());
-    return result;
+  @Override
+  public String getDisplayString(String[] children) {
+    return getStandardDisplayString(getFuncName(), children);
   }
 }

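The UDFYear conversion mirrors the second() rewrite earlier in this patch: one GenericUDF with an INTERVAL_YEAR_MONTH branch that bypasses the calendar entirely and a date/timestamp branch that reads Calendar.YEAR in UTC. A minimal sketch of both branches; everything here appears in the hunk except the HiveIntervalYearMonth(int, int) constructor, which is assumed:

import java.util.Calendar;
import java.util.TimeZone;

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;

public class YearFieldSketch {
  public static void main(String[] args) {
    // Date/timestamp branch: field read through a UTC calendar.
    Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
    calendar.setTimeInMillis(Date.valueOf("2009-07-30").toEpochMilli());
    System.out.println(calendar.get(Calendar.YEAR));  // 2009

    // Interval branch: no calendar involved, just the stored field.
    HiveIntervalYearMonth interval = new HiveIntervalYearMonth(3, 2);  // "3-2"
    System.out.println(interval.getYears());  // 3
  }
}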
http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/BaseMaskUDF.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/BaseMaskUDF.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/BaseMaskUDF.java
index 5c67242..d69a4f7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/BaseMaskUDF.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/BaseMaskUDF.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
@@ -34,8 +35,6 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 
-import java.sql.Date;
-
 
 public abstract class BaseMaskUDF extends GenericUDF {
   private static final Log LOG = LogFactory.getLog(BaseMaskUDF.class);
@@ -228,13 +227,13 @@ class ByteTransformerAdapter extends AbstractTransformerAdapter {
 
 class DateTransformerAdapter extends AbstractTransformerAdapter {
   final DateObjectInspector columnType;
-  final DateWritable        writable;
+  final DateWritableV2 writable;
 
   public DateTransformerAdapter(DateObjectInspector columnType, AbstractTransformer transformer) {
-    this(columnType, transformer, new DateWritable());
+    this(columnType, transformer, new DateWritableV2());
   }
 
-  public DateTransformerAdapter(DateObjectInspector columnType, AbstractTransformer transformer, DateWritable writable) {
+  public DateTransformerAdapter(DateObjectInspector columnType, AbstractTransformer transformer, DateWritableV2 writable) {
     super(transformer);
 
     this.columnType = columnType;

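DateTransformerAdapter keeps a single mutable DateWritableV2 and overwrites it for each masked row instead of allocating per value. A sketch of that reuse pattern; DateWritableV2.set(Date) is assumed to mirror the old DateWritable API, while get() is shown in the UDFWeekOfYear hunk above:

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;

public class WritableReuseSketch {
  public static void main(String[] args) {
    DateWritableV2 writable = new DateWritableV2();
    writable.set(Date.valueOf("2009-07-30"));   // assumed set(Date) mirror
    System.out.println(writable.get());         // 2009-07-30
    writable.set(Date.valueOf("2010-01-01"));   // same object, new value
    System.out.println(writable.get());         // 2010-01-01
  }
}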
http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java
index ca8bc8f..a8bcc97 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java
@@ -20,19 +20,19 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.hadoop.hive.common.io.NonSyncByteArrayInputStream;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.SelectOperator;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedUDAFs;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.*;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ColStatistics;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
 import org.apache.hadoop.hive.ql.plan.Statistics;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -46,7 +46,6 @@ import org.apache.hive.common.util.BloomKFilter;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
-import java.sql.Timestamp;
 import java.util.List;
 
 /**
@@ -197,14 +196,14 @@ public class GenericUDAFBloomFilter implements GenericUDAFResolver2 {
           bf.addBytes(scratchBuffer, startIdx, scratchBuffer.length - startIdx);
           break;
         case DATE:
-          DateWritable vDate = ((DateObjectInspector)inputOI).
+          DateWritableV2 vDate = ((DateObjectInspector)inputOI).
                   getPrimitiveWritableObject(parameters[0]);
           bf.addLong(vDate.getDays());
           break;
         case TIMESTAMP:
           Timestamp vTimeStamp = ((TimestampObjectInspector)inputOI).
                   getPrimitiveJavaObject(parameters[0]);
-          bf.addLong(vTimeStamp.getTime());
+          bf.addLong(vTimeStamp.toEpochMilli());
           break;
         case CHAR:
           Text vChar = ((HiveCharObjectInspector)inputOI).

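For bloom-filter semijoin keys the hashing inputs are unchanged in spirit, DATE still hashes days since epoch and TIMESTAMP epoch millis, but the millis now come from the time-zone-agnostic Hive Timestamp rather than java.sql.Timestamp.getTime(). A hedged sketch of feeding such keys into BloomKFilter; the BloomKFilter(long expectedEntries) constructor and testLong are assumptions, while addLong, getDays and toEpochMilli appear in the hunk:

import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hive.common.util.BloomKFilter;

public class BloomFilterKeySketch {
  public static void main(String[] args) {
    BloomKFilter bf = new BloomKFilter(10000);  // assumed expected-entries ctor

    DateWritableV2 d = new DateWritableV2(14455);  // example: days since epoch
    bf.addLong(d.getDays());                       // DATE key

    Timestamp ts = Timestamp.valueOf("2009-07-30 00:00:00");
    bf.addLong(ts.toEpochMilli());                 // TIMESTAMP key

    System.out.println(bf.testLong(ts.toEpochMilli()));  // true
  }
}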
http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
index 2267589..dd365dd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -1297,7 +1297,7 @@ public class GenericUDAFComputeStats extends AbstractGenericUDAFResolver {
    * High/low value will be saved in stats DB as long value representing days since epoch.
    */
   public static class GenericUDAFDateStatsEvaluator
-      extends GenericUDAFNumericStatsEvaluator<DateWritable, DateObjectInspector> {
+      extends GenericUDAFNumericStatsEvaluator<DateWritableV2, DateObjectInspector> {
 
     @Override
     protected DateObjectInspector getValueObjectInspector() {
@@ -1319,8 +1319,8 @@ public class GenericUDAFComputeStats extends AbstractGenericUDAFResolver {
 
       @Override
       protected void update(Object p, PrimitiveObjectInspector inputOI) {
-        // DateWritable is mutable, DateStatsAgg needs its own copy
-        DateWritable v = new DateWritable((DateWritable) inputOI.getPrimitiveWritableObject(p));
+        // DateWritableV2 is mutable, DateStatsAgg needs its own copy
+        DateWritableV2 v = new DateWritableV2((DateWritableV2) inputOI.getPrimitiveWritableObject(p));
 
         //Update min counter if new value is less than min seen so far
         if (min == null || v.compareTo(min) < 0) {
@@ -1338,8 +1338,8 @@ public class GenericUDAFComputeStats extends AbstractGenericUDAFResolver {
       protected void updateMin(Object minValue, DateObjectInspector minFieldOI) {
         if ((minValue != null) && (min == null ||
             min.compareTo(minFieldOI.getPrimitiveWritableObject(minValue)) > 0)) {
-          // DateWritable is mutable, DateStatsAgg needs its own copy
-          min = new DateWritable(minFieldOI.getPrimitiveWritableObject(minValue));
+          // DateWritableV2 is mutable, DateStatsAgg needs its own copy
+          min = new DateWritableV2(minFieldOI.getPrimitiveWritableObject(minValue));
         }
       }
 
@@ -1347,8 +1347,8 @@ public class GenericUDAFComputeStats extends AbstractGenericUDAFResolver {
       protected void updateMax(Object maxValue, DateObjectInspector maxFieldOI) {
         if ((maxValue != null) && (max == null ||
             max.compareTo(maxFieldOI.getPrimitiveWritableObject(maxValue)) < 0)) {
-          // DateWritable is mutable, DateStatsAgg needs its own copy
-          max = new DateWritable(maxFieldOI.getPrimitiveWritableObject(maxValue));
+          // DateWritableV2 is mutable, DateStatsAgg needs its own copy
+          max = new DateWritableV2(maxFieldOI.getPrimitiveWritableObject(maxValue));
         }
       }
     };
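
The repeated "needs its own copy" comments exist because object inspectors hand back the same mutable writable for every row; holding a reference instead of a copy would let the next row silently rewrite the stored min/max. A small sketch of the hazard; the int constructor and set(int) are assumed to mirror the old DateWritable API, and the copy constructor is the one used above:

import org.apache.hadoop.hive.serde2.io.DateWritableV2;

public class DefensiveCopySketch {
  public static void main(String[] args) {
    DateWritableV2 fromInspector = new DateWritableV2(17000);  // days since epoch
    DateWritableV2 min = new DateWritableV2(fromInspector);    // defensive copy

    DateWritableV2 aliased = fromInspector;  // what NOT to store
    fromInspector.set(18000);                // "next row" reuses the instance

    System.out.println(min.getDays());       // 17000 - copy unaffected
    System.out.println(aliased.getDays());   // 18000 - alias silently changed
  }
}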